From c8fae0256c91a0ebe495270aa15baa2f27211268 Mon Sep 17 00:00:00 2001
From: haoyuren <13851610112@163.com>
Date: Thu, 12 Feb 2026 12:45:24 -0600
Subject: Multi-turn conversation, stop generation, SSE fix, and UI improvements

- Multi-turn context: all council stages now receive conversation history
  (user messages + Stage 3 chairman responses) for coherent follow-ups
- Stop generation: abort streaming mid-request, recover query to input box
- SSE parsing: buffer-based chunking to prevent JSON split across packets
  (sketched below)
- Atomic storage: user + assistant messages saved together after completion,
  preventing dangling messages on abort
- GFM markdown: tables, strikethrough via remark-gfm plugin + table styles
- Performance: memoize user messages and completed assistant messages,
  only re-render the active streaming message
- Model config: gpt-5.2, claude-opus-4.6 as chairman
- Always show input box for multi-turn conversations

Co-Authored-By: Claude Opus 4.6
---
 frontend/src/components/ChatInterface.jsx | 175 ++++++++++++++++++------------
 1 file changed, 108 insertions(+), 67 deletions(-)
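The buffer-based SSE parsing and the stop/recover flow described above live in the streaming caller and its parent state, not in ChatInterface.jsx, so neither appears in the diff below. Two minimal sketches of the ideas follow; the names (readCouncilStream, onEvent, handleCouncilEvent, abortRef, the endpoint path) are illustrative assumptions, not code from this patch.

// SSE: append each network chunk to a buffer and only parse complete lines,
// so JSON.parse never sees a payload split across packets.
async function readCouncilStream(response, onEvent) {
  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  let buffer = '';
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });
    const lines = buffer.split('\n');
    buffer = lines.pop(); // hold the trailing partial line until the next chunk
    for (const line of lines) {
      if (!line.startsWith('data: ')) continue;
      const payload = line.slice('data: '.length).trim();
      if (payload && payload !== '[DONE]') onEvent(JSON.parse(payload));
    }
  }
}

// Stop generation (sketch of the parent component that owns the conversation):
// abort the in-flight fetch and hand the query back to the input box via
// pendingInput; nothing is stored until the stream completes, so an abort
// leaves no dangling user message. Endpoint and storage calls are assumptions.
const abortRef = useRef(null);
const [pendingInput, setPendingInput] = useState(null);

const sendMessage = async (content) => {
  abortRef.current = new AbortController();
  try {
    const response = await fetch('/api/conversation/stream', { // assumed endpoint
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ content }),
      signal: abortRef.current.signal,
    });
    await readCouncilStream(response, handleCouncilEvent);
    // ...save the user and assistant messages together here (atomic storage)...
  } catch (err) {
    if (err.name === 'AbortError') setPendingInput(content); // recover query to input box
  }
};

const stopGeneration = () => abortRef.current?.abort();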
diff --git a/frontend/src/components/ChatInterface.jsx b/frontend/src/components/ChatInterface.jsx
index 3ae796c..5f431c2 100644
--- a/frontend/src/components/ChatInterface.jsx
+++ b/frontend/src/components/ChatInterface.jsx
@@ -1,25 +1,102 @@
-import { useState, useEffect, useRef } from 'react';
+import { useState, useEffect, useRef, memo } from 'react';
 import ReactMarkdown from 'react-markdown';
+import remarkGfm from 'remark-gfm';
 import Stage1 from './Stage1';
 import Stage2 from './Stage2';
 import Stage3 from './Stage3';
 import './ChatInterface.css';
 
+const remarkPlugins = [remarkGfm];
+
+// Only memoize user messages (they never change once sent)
+const UserMessage = memo(function UserMessage({ content }) {
+  return (
+    <div>
+      <div>
+        <div>You</div>
+        <div>
+          <div>
+            {content}
+          </div>
+        </div>
+      </div>
+    </div>
+  );
+});
+
+// Memoize completed assistant messages, but skip memo for the active (last) one
+const AssistantMessage = memo(function AssistantMessage({ msg, isActive }) {
+  return (
+    <div>
+      <div>
+        <div>LLM Council</div>
+
+        {/* Stage 1 */}
+        {msg.loading?.stage1 && (
+          <div>
+            <div />
+            Running Stage 1: Collecting individual responses...
+          </div>
+        )}
+        {msg.stage1 && <Stage1 />}
+
+        {/* Stage 2 */}
+        {msg.loading?.stage2 && (
+          <div>
+            <div />
+            Running Stage 2: Peer rankings...
+          </div>
+        )}
+        {msg.stage2 && (
+          <Stage2 />
+        )}
+
+        {/* Stage 3 */}
+        {msg.loading?.stage3 && (
+          <div>
+            <div />
+            Running Stage 3: Final synthesis...
+          </div>
+        )}
+        {msg.stage3 && <Stage3 />}
+      </div>
+    </div>
+  );
+}, (prevProps, nextProps) => {
+  // If active (streaming), always re-render
+  if (prevProps.isActive || nextProps.isActive) return false;
+  // Otherwise skip re-render (completed messages don't change)
+  return true;
+});
+
 export default function ChatInterface({
   conversation,
   onSendMessage,
+  onStopGeneration,
   isLoading,
+  pendingInput,
+  onPendingInputConsumed,
 }) {
   const [input, setInput] = useState('');
+  const textareaRef = useRef(null);
   const messagesEndRef = useRef(null);
 
-  const scrollToBottom = () => {
+  useEffect(() => {
     messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' });
-  };
+  }, [conversation, isLoading]);
 
+  // Recover input from stopped generation
   useEffect(() => {
-    scrollToBottom();
-  }, [conversation]);
+    if (pendingInput !== null) {
+      setInput(pendingInput);
+      onPendingInputConsumed();
+      setTimeout(() => textareaRef.current?.focus(), 0);
+    }
+  }, [pendingInput]);
 
   const handleSubmit = (e) => {
     e.preventDefault();
@@ -30,7 +107,6 @@ export default function ChatInterface({
   };
 
   const handleKeyDown = (e) => {
-    // Submit on Enter (without Shift)
     if (e.key === 'Enter' && !e.shiftKey) {
       e.preventDefault();
       handleSubmit(e);
@@ -57,57 +133,13 @@ export default function ChatInterface({
           <div>
             <div>
               Ask a question to consult the LLM Council
             </div>
           </div>
           ) : (
-            conversation.messages.map((msg, index) => (
-              <div key={index}>
-                {msg.role === 'user' ? (
-                  <div>
-                    <div>You</div>
-                    <div>
-                      <div>
-                        {msg.content}
-                      </div>
-                    </div>
-                  </div>
-                ) : (
-                  <div>
-                    <div>LLM Council</div>
-
-                    {/* Stage 1 */}
-                    {msg.loading?.stage1 && (
-                      <div>
-                        <div />
-                        Running Stage 1: Collecting individual responses...
-                      </div>
-                    )}
-                    {msg.stage1 && <Stage1 />}
-
-                    {/* Stage 2 */}
-                    {msg.loading?.stage2 && (
-                      <div>
-                        <div />
-                        Running Stage 2: Peer rankings...
-                      </div>
-                    )}
-                    {msg.stage2 && (
-                      <Stage2 />
-                    )}
-
-                    {/* Stage 3 */}
-                    {msg.loading?.stage3 && (
-                      <div>
-                        <div />
-                        Running Stage 3: Final synthesis...
-                      </div>
-                    )}
-                    {msg.stage3 && <Stage3 />}
-                  </div>
-                )}
-              </div>
-            ))
+            conversation.messages.map((msg, index) => {
+              if (msg.role === 'user') {
+                return <UserMessage key={index} content={msg.content} />;
+              }
+              const isLastAssistant = isLoading && index === conversation.messages.length - 1;
+              return <AssistantMessage key={index} msg={msg} isActive={isLastAssistant} />;
+            })
           )}
 
           {isLoading && (
             <div>
 
-          {conversation.messages.length === 0 && (
+