bring back thought process

This commit is contained in:
Xuan Son Nguyen 2025-02-05 22:29:54 +01:00
parent 699e8e0fc7
commit 58499a8df9
6 changed files with 80 additions and 10 deletions

Binary file not shown.

View file

@@ -10,6 +10,12 @@ import remarkMath from 'remark-math';
import remarkBreaks from 'remark-breaks';
import 'katex/dist/katex.min.css';
interface SplitMessage {
content: PendingMessage['content'];
thought?: string;
isThinking?: boolean;
}
export default function ChatMessage({
msg,
id,
@@ -36,6 +42,34 @@ export default function ChatMessage({
[msg.timings]
);
const isPending: boolean = !!(msg as PendingMessage).convId;
// for reasoning model, we split the message into content and thought
// TODO: implement this as remark/rehype plugin in the future
const { content, thought, isThinking }: SplitMessage = useMemo(() => {
if (msg.content === null || msg.role !== 'assistant') {
return { content: msg.content };
}
let actualContent = '';
let thought = '';
let isThinking = false;
let thinkSplit = msg.content.split('<think>', 2);
actualContent += thinkSplit[0];
while (thinkSplit[1] !== undefined) {
// <think> tag found
thinkSplit = thinkSplit[1].split('</think>', 2);
thought += thinkSplit[0];
isThinking = true;
if (thinkSplit[1] !== undefined) {
// </think> closing tag found
isThinking = false;
thinkSplit = thinkSplit[1].split('<think>', 2);
actualContent += thinkSplit[0];
}
}
return { content: actualContent, thought, isThinking };
}, [msg]);
if (!viewingConversation) return null;
const regenerate = async () => {
@@ -89,9 +123,10 @@ export default function ChatMessage({
</button>
</>
)}
{/* not editing content, render message */}
{editingContent === null && (
<>
{msg.content === null ? (
{content === null ? (
<>
{/* show loading dots for pending message */}
<span className="loading loading-dots loading-md"></span>
@@ -100,7 +135,31 @@
<>
{/* render message as markdown */}
<div dir="auto">
<MarkdownDisplay content={msg.content} />
{thought && (
<details
className="collapse bg-base-200 collapse-arrow mb-4"
open={isThinking && config.showThoughtInProgress}
>
<summary className="collapse-title">
{isPending && isThinking ? (
<span>
<span
v-if="isGenerating"
className="loading loading-spinner loading-md mr-2"
style={{ verticalAlign: 'middle' }}
></span>
<b>Thinking</b>
</span>
) : (
<b>Thought Process</b>
)}
</summary>
<div className="collapse-content">
<MarkdownDisplay content={thought} />
</div>
</details>
)}
<MarkdownDisplay content={content} />
</div>
</>
)}
@@ -242,7 +301,7 @@ function MarkdownDisplay({ content }: { content: string }) {
export function preprocessLaTeX(content: string): string {
// Step 1: Protect code blocks
const codeBlocks: string[] = [];
content = content.replace(/(```[\s\S]*?```|`[^`\n]+`)/g, (match, code) => {
content = content.replace(/(```[\s\S]*?```|`[^`\n]+`)/g, (_, code) => {
codeBlocks.push(code);
return `<<CODE_BLOCK_${codeBlocks.length - 1}>>`;
});

View file

@@ -45,6 +45,8 @@ export default function ChatScreen() {
// if user is creating a new conversation, redirect to the new conversation
navigate(`/chat/${convId}`);
}
scrollToBottom(false);
// auto scroll as message is being generated
const onChunk = () => scrollToBottom(true);
if (!(await sendMessage(convId, inputMsg, onChunk))) {
// restore the input message if failed

View file

@@ -85,7 +85,9 @@ export default function SettingDialog({
/>
<label className="form-control mb-2">
<div className="label">System Message</div>
<div className="label">
System Message (will be disabled if left empty)
</div>
<textarea
className="textarea textarea-bordered h-24"
placeholder={`Default: ${CONFIG_DEFAULT.systemMessage}`}

View file

@@ -20,6 +20,9 @@
@apply whitespace-pre-wrap rounded-lg p-2;
border: 1px solid currentColor;
}
p {
@apply mb-2;
}
/* TODO: fix markdown table */
}

View file

@@ -93,7 +93,9 @@ export const AppContextProvider = ({
try {
// prepare messages for API
let messages: APIMessage[] = [
{ role: 'system', content: config.systemMessage },
...(config.systemMessage.length === 0
? []
: [{ role: 'system', content: config.systemMessage } as APIMessage]),
...normalizeMsgsForAPI(currConversation?.messages ?? []),
];
if (config.excludeThoughtOnReq) {
@@ -243,12 +245,14 @@ export const AppContextProvider = ({
StorageUtils.filterAndKeepMsgs(convId, (msg) => msg.id < origMsgId);
if (content) {
// case: replace user message then generate assistant message
await sendMessage(convId, content, onChunk);
} else {
// case: generate last assistant message
await generateMessage(convId, onChunk);
StorageUtils.appendMsg(convId, {
id: Date.now(),
role: 'user',
content,
});
}
await generateMessage(convId, onChunk);
};
const saveConfig = (config: typeof CONFIG_DEFAULT) => {