Reasoning

Overview

Reasoning (or thinking) is a feature that lets the assistant think before producing its final answer. Because this noticeably slows down the response, users are given the option to view the AI assistant’s “thoughts” while they wait.

Quick start

To enable reasoning, pass the enableReasoning prop to the chat component.
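
For example, a minimal sketch of enabling the prop (other props such as threads and apiRef, shown in the full examples below, are omitted):

<ChatPage
  enableReasoning
  onUserMessageSent={onUserMessageSent}
/>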

The assistant’s thoughts are passed through the reasoning parameter of the onUserMessageSent handler (see the Messaging section for details).

ℹ️
Settings only apply to the message that is currently being processed. For the next message, you will need to set them up again.

Available common options:

  • pushChunk - pass part of the text; each new segment is appended to what was passed before.
  • setFull - pass the entire text at once; it replaces anything passed previously (a setFull sketch follows the example below).

Example:

const onUserMessageSent = async (params: MessageSentParams) => {
  // receiving data...

  for await (const reasoningChunk of reasoningStream) {
    params.reasoning.pushChunk(reasoningChunk);
  }

  // continue with the answer stream
}
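
If the reasoning arrives as a single string rather than a stream, setFull can be used in the same place. A minimal sketch, where fullReasoningText stands in for the text received from your backend:

const onUserMessageSent = async (params: MessageSentParams) => {
  // receiving data...

  // Replace any previously shown reasoning with the complete text.
  params.reasoning.setFull(fullReasoningText);

  // continue with the answer stream
}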

Visualization

CUI Kit supports two options for displaying the thinking process:

  • Segmented by headings (similar to ChatGPT)
  • Streamed text (similar to Claude)

By default, “headings” mode is enabled. However, if CUI Kit is unable to split the received text into headings, it will automatically switch to “stream”.

Segmentation by headings

import * as React from "react";
import {
  ChatPage,
  Thread, useChatApiRef, MessageSentParams, ChatApiRef,
  useAssistantAnswerMock,
} from "@plteam/chat-ui";
import Box from "@mui/material/Box";
import Button from '@mui/material/Button';

const SendMessageRow: React.FC<{ apiRef: React.RefObject<ChatApiRef> }> = ({ apiRef }) => {
  const [isLoading, setIsLoading] = React.useState(false);

  const onClick = async () => {
    setIsLoading(true);
    await apiRef.current?.sendUserMessage('Run test');
    setIsLoading(false);
  };
  return (
    <Box width="100%" display="flex" justifyContent="center">
      <Button
        disabled={isLoading}
        variant="contained"
        sx={{ width: "min(70%, 300px)" }}
        onClick={onClick}
      >
        {"Send test Message\r"}
      </Button>
    </Box>
  );
}

const App: React.FC = () => {

  const [threads] = React.useState<Thread[]>([
    {
      id: "test-thread",
      title: "Reasoning test",
      messages: [
        {
          role: "user",
          content: "Hello!",
        },
        {
          role: "assistant",
          content: "Hello! Click the \"Send Message\" button to test the reasoning visualization.\n\nThinking will be divided into sections with headers.",
        },
      ],
    },
  ]);

  const { reasoningGenerator, streamGenerator } = useAssistantAnswerMock()

  const onUserMessageSent = async ({ reasoning, ...params }: MessageSentParams) => {
    const reasoningStream = reasoningGenerator();

    for await (const reasoningChunk of reasoningStream) {
      reasoning.pushChunk(reasoningChunk);
    }

    const stream = streamGenerator();

    for await (const chunk of stream) {
      params.pushChunk(chunk);
    }
  }

  const apiRef = useChatApiRef();

  return (
    <Box height="100dvh" width="100dvw">
      <ChatPage
        enableReasoning
        initialThread={threads[0]}
        threads={threads}
        apiRef={apiRef}
        slots={{
          messageRowInner: SendMessageRow,
        }}
        slotProps={{
          messageRowInner: { apiRef },
        }}
        onUserMessageSent={onUserMessageSent}
      />
    </Box>
  );
}

export default App;

Streamed text

import * as React from "react";
import {
  ChatPage,
  Thread, useChatApiRef, MessageSentParams, ChatApiRef,
  useAssistantAnswerMock,
} from "@plteam/chat-ui";
import Box from "@mui/material/Box";
import Button from '@mui/material/Button';

const SendMessageRow: React.FC<{ apiRef: React.RefObject<ChatApiRef> }> = ({ apiRef }) => {
  const [isLoading, setIsLoading] = React.useState(false);

  const onClick = async () => {
    setIsLoading(true);
    await apiRef.current?.sendUserMessage('Run test');
    setIsLoading(false);
  };
  return (
    <Box width="100%" display="flex" justifyContent="center">
      <Button
        disabled={isLoading}
        variant="contained"
        sx={{ width: "min(70%, 300px)" }}
        onClick={onClick}
      >
        {"Send test Message\r"}
      </Button>
    </Box>
  );
}

const App: React.FC = () => {

  const [threads] = React.useState<Thread[]>([
    {
      id: "test-thread",
      title: "Reasoning test",
      messages: [
        {
          role: "user",
          content: "Hello!",
        },
        {
          role: "assistant",
          content: "Hello! Click the \"Send Message\" button to test the reasoning visualization.\n\nA simulation of the AI assistant's stream of thoughts will be output.",
        },
      ],
    },
  ]);

  const { reasoningGenerator, streamGenerator } = useAssistantAnswerMock()

  const onUserMessageSent = async ({ reasoning, ...params }: MessageSentParams) => {
    const reasoningStream = reasoningGenerator({ loremIpsum: 'large' });

    for await (const reasoningChunk of reasoningStream) {
      reasoning.pushChunk(reasoningChunk);
    }

    const stream = streamGenerator();

    for await (const chunk of stream) {
      params.pushChunk(chunk);
    }
  }

  const apiRef = useChatApiRef();

  return (
    <Box height="100dvh" width="100dvw">
      <ChatPage
        enableReasoning
        initialThread={threads[0]}
        threads={threads}
        apiRef={apiRef}
        slots={{
          messageRowInner: SendMessageRow,
        }}
        slotProps={{
          messageRowInner: { apiRef },
        }}
        onUserMessageSent={onUserMessageSent}
      />
    </Box>
  );
}

export default App;

Reasoning history

You can display reasoning saved from a previous session by passing the reasoning parameter in the message data.

import * as React from "react";
import {
  ChatPage,
  useAssistantAnswerMock,
  Thread, MessageSentParams,
} from "@plteam/chat-ui";
import Box from "@mui/material/Box";

const reasoningText = `
**Logical Analysis**  

The phrase "Good morning" is a standard greeting used in the morning. It expresses a wish for a good start to the day and serves as a way to establish a friendly connection between conversation participants. Logically, this greeting does not carry a hidden meaning; its main purpose is to create a positive atmosphere for communication.  

**Contextual Interpretation**  

This greeting is used in both formal and informal settings, usually between dawn and noon. The context of its use depends on the situation: in a work environment, it may serve as the beginning of a business conversation, while in a personal chat, it expresses friendliness. If the message is sent at a different time of day, it may be a mistake or a stylistic choice.  

**Emotional Evaluation**  

"Good morning" carries a positive emotional tone. It demonstrates friendliness, openness, and a wish for a pleasant day. If the recipient responds with a similar greeting, it confirms mutual politeness. Otherwise, the reaction may depend on the person's mood and the context of the conversation.  

**Cultural Significance**  

In many cultures, a morning greeting is an essential part of communication and may be accompanied by additional questions, such as "How are you?" or "How was your night?". Different languages have their own equivalents with various shades of meaning. For example, in English, "Good morning" is used similarly, while in Japanese, "おはようございます" (ohayou gozaimasu) has a more formal character.  
`

const App: React.FC = () => {
  const [threads] = React.useState<Thread[]>([
    {
      id: "test-thread",
      title: "Reasoning test",
      messages: [
        {
          role: "user",
          content: "Good morning!",
        },
        {
          role: "assistant",
          content: "Good morning! How can I assist you today?",
          reasoning: {
            text: reasoningText,
            timeSec: 150
          }
        },
      ],
    },
  ]);

  const { reasoningGenerator, streamGenerator } = useAssistantAnswerMock()

  const onUserMessageSent = async ({ reasoning, ...params }: MessageSentParams) => {
    const reasoningStream = reasoningGenerator();

    for await (const reasoningChunk of reasoningStream) {
      reasoning.pushChunk(reasoningChunk);
    }

    const stream = streamGenerator();

    for await (const chunk of stream) {
      params.pushChunk(chunk);
    }
  }

  return (
    <Box height="100dvh" width="100dvw">
      <ChatPage
        enableReasoning
        initialThread={threads[0]}
        threads={threads}
        onUserMessageSent={onUserMessageSent}
      />
    </Box>
  );
}

export default App;

Manual control

You can manually control the visualization type, headings, and reasoning time.

Using one of these methods causes CUI Kit to stop managing that aspect automatically (for example, if you set headings manually, CUI Kit will no longer attempt to detect them). These restrictions apply only to the current message; for the next reasoning, all settings revert to the defaults.

To restore automatic control for the current reasoning, call the unlockAutoManagement function.

Available advanced options:

  • setTimeSec - pass the time the model spent on reasoning. If not provided, CUI Kit calculates it automatically.
  • setTitle - set a custom heading. By default, CUI Kit attempts to split the received text into headings on its own, but you can provide your own.
  • setViewType - select the visualization option, either headings or stream.
  • unlockAutoManagement - unlock automatic management of the advanced options. You can pass a specific option or unlock all of them at once (see the sketch after this list).
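
As a sketch of the last two options, the handler below forces the stream view for one message and then hands control back to CUI Kit. The 'stream' value and the argument-less unlockAutoManagement call are assumptions based on the descriptions above; reasoningGenerator and streamGenerator come from useAssistantAnswerMock as in the other examples:

const onUserMessageSent = async ({ reasoning, ...params }: MessageSentParams) => {
  const reasoningStream = reasoningGenerator();

  // Force the streamed-text view for this message; CUI Kit stops choosing the view automatically.
  reasoning.setViewType('stream');

  for await (const reasoningChunk of reasoningStream) {
    reasoning.pushChunk(reasoningChunk);
  }

  // Hand automatic control back to CUI Kit for the rest of this reasoning.
  reasoning.unlockAutoManagement();

  const stream = streamGenerator();

  for await (const chunk of stream) {
    params.pushChunk(chunk);
  }
}

The full example below sets headings and reasoning time manually:
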
import * as React from "react";
import {
  ChatPage,
  Thread, useChatApiRef, MessageSentParams, ChatApiRef,
  useAssistantAnswerMock,
} from "@plteam/chat-ui";
import Box from "@mui/material/Box";
import Button from '@mui/material/Button';

const SendMessageRow: React.FC<{ apiRef: React.RefObject<ChatApiRef> }> = ({ apiRef }) => {
  const [isLoading, setIsLoading] = React.useState(false);

  const onClick = async () => {
    setIsLoading(true);
    await apiRef.current?.sendUserMessage('Run test');
    setIsLoading(false);
  };
  return (
    <Box width="100%" display="flex" justifyContent="center">
      <Button
        disabled={isLoading}
        variant="contained"
        sx={{ width: "min(70%, 300px)" }}
        onClick={onClick}
      >
        {"Send test Message\r"}
      </Button>
    </Box>
  );
}

const App: React.FC = () => {

  const [threads] = React.useState<Thread[]>([
    {
      id: "test-thread",
      title: "Reasoning test",
      messages: [
        {
          role: "user",
          content: "Hello!",
        },
        {
          role: "assistant",
          content: "Hello! Click the \"Send Message\" button to test the reasoning visualization.\n\nHeaders and reasoning time will be set manually.",
        },
      ],
    },
  ]);

  const { reasoningGenerator, streamGenerator } = useAssistantAnswerMock();

  const onUserMessageSent = async ({ reasoning, ...params }: MessageSentParams) => {
    const reasoningStream = reasoningGenerator({ loremIpsum: 'large' });
    const neutralHeadings = [
      "Analysis and Interpretation", "Patterns and Trends", "Context and Meaning", "Logic and Structure",
      "Probability and Outcomes", "Neutral Observations", "Connections and Relations"
    ];

    params.setStatus('Awaiting response...');

    const thinkingTimeStart = performance.now();
    await new Promise(resolve => setTimeout(resolve, 3000));

    let i = 0;
    for await (const reasoningChunk of reasoningStream) {
      if (!(i % 20)) {
        const newTitle = neutralHeadings.shift();
        if (newTitle) reasoning.setTitle(newTitle);
      }
      reasoning.pushChunk(reasoningChunk);
      i++;
    }

    reasoning.setTimeSec((performance.now() - thinkingTimeStart) / 1000);

    const stream = streamGenerator();

    for await (const chunk of stream) {
      params.pushChunk(chunk);
    }
  }

  const apiRef = useChatApiRef();

  return (
    <Box height="100dvh" width="100dvw">
      <ChatPage
        enableReasoning
        initialThread={threads[0]}
        threads={threads}
        apiRef={apiRef}
        slots={{
          messageRowInner: SendMessageRow,
        }}
        slotProps={{
          messageRowInner: { apiRef },
        }}
        onUserMessageSent={onUserMessageSent}
      />
    </Box>
  );
}

export default App;