Messaging
Overview
To send a message to the server, use the `onUserMessageSent` callback, which the chat calls after the user has sent a message.
You can stream assistant answer (if your server supports the stream API) or print the entire message at once, using the parameters provided by the function.
Messaging control
The `onUserMessageSent` function receives a `MessageSentParams` object as its argument. For the full list of parameters, refer to the API — MessageSentParams page.
Sequence of actions
- You send the user’s message to your server.
- After receiving the response, call the `pushChunk` function with a part of the assistant’s message (if using the Stream API), or call the `setText` function to provide the full message.
- Call the `onFinish` function to complete the waiting for the response.
Push Chunks
Each provided text chunk is appended to the previously pushed text.
const { streamGenerator } = useAssistantAnswerMock();

// Called by the chat after the user sends a message.
const onUserMessageSent = React.useCallback(async (params: MessageSentParams) => {
  // Simulate a streaming server answer (one chunk every 100 ms).
  const stream = streamGenerator(undefined, { delay: 100 });
  for await (const chunk of stream) {
    params.pushChunk(chunk ?? '');
  }
  // Signal that the assistant's answer is complete.
  params.onFinish();
}, [streamGenerator]); // was []: the callback closes over streamGenerator, so it must be a dependency
import * as React from "react";
import {
ChatPage,
Thread, useChatApiRef, MessageSentParams, useAssistantAnswerMock, ChatApiRef,
} from "@plteam/chat-ui";
import Box from "@mui/material/Box";
import Button from '@mui/material/Button';
/**
 * Centered button rendered through ChatPage's `messageRowInner` slot.
 * Clicking it submits a hard-coded user message via the imperative chat API.
 */
const SendMessageRow: React.FC<{ apiRef: React.RefObject<ChatApiRef> }> = ({ apiRef }) => {
  const [isLoading, setIsLoading] = React.useState(false);

  const onClick = async () => {
    setIsLoading(true);
    try {
      // Send a message programmatically, as if the user typed it.
      await apiRef.current?.sendUserMessage('Run test');
    } finally {
      // Re-enable the button even when sending rejects
      // (the original left it disabled forever on error).
      setIsLoading(false);
    }
  };

  return (
    <Box width={"100%"} display={"flex"} justifyContent={"center"}>
      <Button
        onClick={onClick}
        disabled={isLoading}
        variant="contained"
        sx={{ width: "min(70%, 300px)" }}
      >
        Send test Message
      </Button>
    </Box>
  );
};
/**
 * Demo: streaming an assistant answer with `pushChunk`.
 * Every pushed chunk is appended to the assistant message being rendered.
 */
const App: React.FC = () => {
  const [threads] = React.useState<Thread[]>([
    {
      id: "test-thread",
      title: "Messaging test",
      messages: [
        {
          role: "user",
          content: "Hello!",
        },
        {
          role: "assistant",
          content: "Hello! Click the \"Send Message\" button to test the `pushChunks` function.",
        },
      ],
    },
  ]);

  const { streamGenerator } = useAssistantAnswerMock();

  // Called by the chat after the user sends a message.
  const onUserMessageSent = React.useCallback(async (params: MessageSentParams) => {
    // Simulate a streaming server answer (one chunk every 100 ms).
    const stream = streamGenerator(undefined, { delay: 100 });
    for await (const chunk of stream) {
      params.pushChunk(chunk ?? '');
    }
    // End the "awaiting response" state.
    params.onFinish();
  }, [streamGenerator]); // was []: streamGenerator is closed over, so it must be a dependency

  const apiRef = useChatApiRef();

  return (
    <Box height={"100dvh"} width={"100dvw"}>
      <ChatPage
        initialThread={threads[0]}
        threads={threads}
        onUserMessageSent={onUserMessageSent}
        apiRef={apiRef}
        slots={{
          messageRowInner: SendMessageRow,
        }}
        slotProps={{
          messageRowInner: { apiRef },
        }}
      />
    </Box>
  );
};
Push Text
It replaces the current assistant text with the text you provide.
// Simulate a 5-second server round-trip, then deliver the whole answer at once.
const onUserMessageSent = React.useCallback(async (params: MessageSentParams) => {
  await new Promise(resolve => setTimeout(resolve, 5000));
  params.setText(generateRandomLoremIpsum('medium'));
  params.onFinish();
}, []);
import * as React from "react";
import {
ChatPage,
Thread, useChatApiRef, MessageSentParams, ChatApiRef,
} from "@plteam/chat-ui";
import Box from "@mui/material/Box";
import Button from '@mui/material/Button';
import { generateRandomLoremIpsum } from '../../../packages/chat-ui/src/utils/stringUtils/generateLoremIpsum.ts';
/**
 * Centered button rendered through ChatPage's `messageRowInner` slot.
 * Clicking it submits a hard-coded user message via the imperative chat API.
 */
const SendMessageRow: React.FC<{ apiRef: React.RefObject<ChatApiRef> }> = ({ apiRef }) => {
  const [isLoading, setIsLoading] = React.useState(false);

  const onClick = async () => {
    setIsLoading(true);
    try {
      // Send a message programmatically, as if the user typed it.
      await apiRef.current?.sendUserMessage('Run test');
    } finally {
      // Re-enable the button even when sending rejects
      // (the original left it disabled forever on error).
      setIsLoading(false);
    }
  };

  return (
    <Box width={"100%"} display={"flex"} justifyContent={"center"}>
      <Button
        onClick={onClick}
        disabled={isLoading}
        variant="contained"
        sx={{ width: "min(70%, 300px)" }}
      >
        Send test Message
      </Button>
    </Box>
  );
};
/**
 * Demo: delivering the assistant answer in one piece with `setText`.
 * A fixed delay stands in for the round-trip to a real server.
 */
const App: React.FC = () => {
  const apiRef = useChatApiRef();

  const [threads] = React.useState<Thread[]>([
    {
      id: "test-thread",
      title: "Messaging test",
      messages: [
        { role: "user", content: "Hello!" },
        {
          role: "assistant",
          content: "Hello! Click the \"Send Message\" button to test the `setText` function.\n\nThe message will appear in full after a short pause that simulates receiving a response from the server.",
        },
      ],
    },
  ]);

  // Wait five seconds, then replace the assistant text in a single call.
  const onUserMessageSent = React.useCallback(async (params: MessageSentParams) => {
    await new Promise(resolve => setTimeout(resolve, 5000));
    params.setText(generateRandomLoremIpsum('medium'));
    params.onFinish();
  }, []);

  return (
    <Box height="100dvh" width="100dvw">
      <ChatPage
        initialThread={threads[0]}
        threads={threads}
        onUserMessageSent={onUserMessageSent}
        apiRef={apiRef}
        slots={{ messageRowInner: SendMessageRow }}
        slotProps={{ messageRowInner: { apiRef } }}
      />
    </Box>
  );
};
Finishing
Waiting for the assistant’s response will be terminated automatically after the onUserMessageSent function is executed.
However, there may be cases where you continue to receive server information that isn’t necessary for the user, and you don’t want them to wait for the stream to complete in order to interact with the chat. In such cases, you can call the onFinish function before the stream ends.
const onUserMessageSent = React.useCallback(async (params: MessageSentParams) => {
  openSnackbar('Message sent!');
  // Stream the user-visible part of the answer.
  const stream = streamGenerator(undefined, { delay: 100 });
  for await (const chunk of stream) {
    params.pushChunk(chunk ?? '');
  }
  openSnackbar('Text received, calling onFinish, but function execution continues');
  // Release the chat UI now; the rest of this handler keeps running.
  params.onFinish();
  // Receiving some additional data
  await new Promise(resolve => setTimeout(resolve, 5000));
  openSnackbar('Stream completed');
}, [openSnackbar, streamGenerator]); // was []: both values are closed over by the callback
import * as React from "react";
import {
ChatPage,
Thread, useChatApiRef, MessageSentParams, useAssistantAnswerMock, ChatApiRef,
} from "@plteam/chat-ui";
import Box from "@mui/material/Box";
import Button from '@mui/material/Button';
import Snackbar, { SnackbarCloseReason } from '@mui/material/Snackbar';
import IconButton from '@mui/material/IconButton';
import CloseIcon from '@mui/icons-material/Close';
/**
 * Centered button rendered through ChatPage's `messageRowInner` slot.
 * Clicking it submits a hard-coded user message via the imperative chat API.
 */
const SendMessageRow: React.FC<{ apiRef: React.RefObject<ChatApiRef> }> = ({ apiRef }) => {
  const [isLoading, setIsLoading] = React.useState(false);

  const onClick = async () => {
    setIsLoading(true);
    try {
      // Send a message programmatically, as if the user typed it.
      await apiRef.current?.sendUserMessage('Run test');
    } finally {
      // Re-enable the button even when sending rejects
      // (the original left it disabled forever on error).
      setIsLoading(false);
    }
  };

  return (
    <Box width={"100%"} display={"flex"} justifyContent={"center"}>
      <Button
        onClick={onClick}
        disabled={isLoading}
        variant="contained"
        sx={{ width: "min(70%, 300px)" }}
      >
        Send test Message
      </Button>
    </Box>
  );
};
/**
 * Demo: calling `onFinish` before the handler returns, so the user can
 * interact with the chat while trailing (technical) data is still streaming.
 * Snackbar messages narrate each phase of the handler.
 */
const App: React.FC = () => {
  const [open, setOpen] = React.useState(false);
  const [text, setText] = React.useState('');

  const [threads] = React.useState<Thread[]>([
    {
      id: "test-thread",
      title: "Messaging test",
      messages: [
        {
          role: "user",
          content: "Hello!",
        },
        {
          role: "assistant",
          content: "Hello! Click the \"Send Message\" button to test the `onFinish` function.\n\nFor the user, the stream will finish sooner, so they won't have to wait for any technical information.",
        },
      ],
    },
  ]);

  const { streamGenerator } = useAssistantAnswerMock();

  // Memoized helper; state setters are stable, so no dependencies are needed.
  const openSnackbar = React.useCallback((text: string) => {
    setText(text);
    setOpen(true);
  }, []);

  const onUserMessageSent = React.useCallback(async (params: MessageSentParams) => {
    openSnackbar('Message sent!');
    // Stream the user-visible part of the answer.
    const stream = streamGenerator(undefined, { delay: 100 });
    for await (const chunk of stream) {
      params.pushChunk(chunk ?? '');
    }
    openSnackbar('Text received, calling onFinish, but function execution continues');
    // Release the chat UI now; the rest of this handler keeps running.
    params.onFinish();
    // Receiving some additional data
    await new Promise(resolve => setTimeout(resolve, 5000));
    openSnackbar('Stream completed');
  }, [openSnackbar, streamGenerator]); // was []: both values are closed over

  const apiRef = useChatApiRef();

  // Memoized so the `snackBarActions` memo below actually caches
  // (the original recreated handleClose on every render, defeating the memo).
  const handleClose = React.useCallback(
    (_event: React.SyntheticEvent | Event, reason?: SnackbarCloseReason) => {
      if (reason === 'clickaway') {
        return;
      }
      setOpen(false);
    },
    [],
  );

  const snackBarActions = React.useMemo(() => (
    <IconButton
      size="small"
      aria-label="close"
      color="inherit"
      onClick={handleClose}
    >
      <CloseIcon fontSize="small" />
    </IconButton>
  ), [handleClose]);

  return (
    <>
      <Box height={"100dvh"} width={"100dvw"}>
        <ChatPage
          initialThread={threads[0]}
          threads={threads}
          onUserMessageSent={onUserMessageSent}
          apiRef={apiRef}
          slots={{
            messageRowInner: SendMessageRow,
          }}
          slotProps={{
            messageRowInner: { apiRef },
          }}
        />
      </Box>
      <Snackbar
        open={open}
        onClose={handleClose}
        message={text}
        action={snackBarActions}
      />
    </>
  );
};
Awaiting status
About the waiting status, see the Awaiting Response section.
Forced stop of message streaming
Pass the prop handleStopMessageStreaming, which the chat will call when the “stop” button is pressed during the streaming of the assistant’s response.
Mock API
For testing the chat functions, you can use our simple React hook `useAssistantAnswerMock`, which returns `onUserMessageSent` for simulating the streaming of the assistant’s response, and `handleStopMessageStreaming`, which stops the streaming.
// The mock hook returns a ready-made message handler and a stop handler.
const { onUserMessageSent, handleStopMessageStreaming } =
useAssistantAnswerMock();
Example
// Wire both mock handlers straight into ChatPage.
<ChatPage
initialThread={threads[0]}
threads={threads}
handleStopMessageStreaming={handleStopMessageStreaming}
onUserMessageSent={onUserMessageSent}
/>
import * as React from "react";
import {
ChatPage,
useAssistantAnswerMock,
Thread,
} from "@plteam/chat-ui";
import Box from "@mui/material/Box";
/**
 * Minimal mock-API example: `useAssistantAnswerMock` supplies both the
 * message handler and the stop handler consumed by ChatPage.
 */
const App: React.FC = () => {
  const { onUserMessageSent, handleStopMessageStreaming } =
    useAssistantAnswerMock();

  const [threads] = React.useState<Thread[]>([
    {
      id: "test-thread",
      title: "Welcome message",
      messages: [
        { role: "user", content: "Hello!" },
        { role: "assistant", content: "Hello there! How can I assist you today?" },
      ],
    },
  ]);

  return (
    <Box height="100dvh" width="100%">
      <ChatPage
        initialThread={threads[0]}
        threads={threads}
        handleStopMessageStreaming={handleStopMessageStreaming}
        onUserMessageSent={onUserMessageSent}
      />
    </Box>
  );
};