| author | haoyuren <13851610112@163.com> | 2026-02-12 12:45:24 -0600 |
|---|---|---|
| committer | haoyuren <13851610112@163.com> | 2026-02-12 12:45:24 -0600 |
| commit | c8fae0256c91a0ebe495270aa15baa2f27211268 (patch) | |
| tree | efc908a9fb259a18809ab5151a15fc0f1e10fdf1 /frontend/src/components/ChatInterface.jsx | |
| parent | 92e1fccb1bdcf1bab7221aa9ed90f9dc72529131 (diff) | |
Multi-turn conversation, stop generation, SSE fix, and UI improvements
- Multi-turn context: all council stages now receive conversation history
(user messages + Stage 3 chairman responses) for coherent follow-ups
- Stop generation: abort the in-flight stream and recover the query to the
  input box (sketched below)
- SSE parsing: buffer-based chunking so a JSON payload is never split across
  network packets (sketched below)
- Atomic storage: user + assistant messages saved together after completion,
  preventing dangling messages on abort (sketched below)
- GFM markdown: tables, strikethrough via remark-gfm plugin + table styles
- Performance: memoize user messages and completed assistant messages; only
  re-render the actively streaming message
- Model config: gpt-5.2, claude-opus-4.6 as chairman
- Always show input box for multi-turn conversations
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
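
The wiring on the other side of the new `onStopGeneration` / `pendingInput` / `onPendingInputConsumed` props is not part of this diff. Below is a minimal sketch of how a parent could drive it, assuming it streams with `fetch` plus an `AbortController`; the component name `App`, the endpoint path, and the payload shape are illustrative assumptions, not code from this commit.

```jsx
// Hypothetical parent component (not in this commit's diff); names are illustrative.
import { useRef, useState } from 'react';
import ChatInterface from './components/ChatInterface';

export default function App() {
  const [conversation] = useState({ messages: [] });
  const [isLoading, setIsLoading] = useState(false);
  const [pendingInput, setPendingInput] = useState(null);
  const abortRef = useRef(null);    // AbortController for the in-flight stream
  const lastQueryRef = useRef('');  // remembered so Stop can hand the query back

  const onSendMessage = async (content) => {
    lastQueryRef.current = content;
    abortRef.current = new AbortController();
    setIsLoading(true);
    try {
      // Endpoint and payload are assumptions; only the abort pattern matters here.
      const res = await fetch('/api/chat/stream', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ content }),
        signal: abortRef.current.signal,
      });
      // ...read the SSE stream from res.body and update conversation state...
    } catch (err) {
      if (err.name !== 'AbortError') throw err;  // aborts are expected on Stop
    } finally {
      setIsLoading(false);
    }
  };

  // Stop button: abort the request, then let ChatInterface recover the query.
  const onStopGeneration = () => {
    abortRef.current?.abort();
    setPendingInput(lastQueryRef.current);
  };

  return (
    <ChatInterface
      conversation={conversation}
      onSendMessage={onSendMessage}
      onStopGeneration={onStopGeneration}
      isLoading={isLoading}
      pendingInput={pendingInput}
      onPendingInputConsumed={() => setPendingInput(null)}
    />
  );
}
```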
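The SSE reader itself lives outside ChatInterface.jsx, so the parsing fix is not visible in this diff. The buffer-based chunking idea, sketched under the assumption that the backend emits standard `data: {...}` events separated by blank lines: decode each network chunk into a rolling buffer, parse only events that are known to be complete, and carry the trailing partial event over to the next read. Without the buffer, `JSON.parse` on a raw chunk can throw whenever a payload straddles two packets.

```javascript
// Minimal sketch of buffer-based SSE parsing (the real reader is not in this diff).
async function readSseStream(res, onEvent) {
  const reader = res.body.getReader();
  const decoder = new TextDecoder();
  let buffer = '';

  while (true) {
    const { value, done } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });

    // SSE events are separated by a blank line; the last piece may be incomplete.
    const events = buffer.split('\n\n');
    buffer = events.pop();  // keep the partial event for the next chunk

    for (const event of events) {
      for (const line of event.split('\n')) {
        if (line.startsWith('data: ')) {
          onEvent(JSON.parse(line.slice(6)));  // safe: this event is complete
        }
      }
    }
  }
}
```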
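The persistence call is also outside this file. A sketch of the "save both messages together" idea, assuming a hypothetical REST endpoint; the URL and payload shape are not taken from this repo.

```javascript
// Illustrative only: persist the finished turn in one request, after Stage 3 completes.
async function persistTurn(conversationId, userMessage, assistantMessage) {
  // If the user hits Stop, this is never reached, so no dangling user message is written.
  await fetch(`/api/conversations/${conversationId}/messages`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ messages: [userMessage, assistantMessage] }),
  });
}
```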
Diffstat (limited to 'frontend/src/components/ChatInterface.jsx')
| Mode | File | Lines changed |
|---|---|---|
| -rw-r--r-- | frontend/src/components/ChatInterface.jsx | 175 |

1 file changed, 108 insertions, 67 deletions
diff --git a/frontend/src/components/ChatInterface.jsx b/frontend/src/components/ChatInterface.jsx
index 3ae796c..5f431c2 100644
--- a/frontend/src/components/ChatInterface.jsx
+++ b/frontend/src/components/ChatInterface.jsx
@@ -1,25 +1,102 @@
-import { useState, useEffect, useRef } from 'react';
+import { useState, useEffect, useRef, memo } from 'react';
 import ReactMarkdown from 'react-markdown';
+import remarkGfm from 'remark-gfm';
 import Stage1 from './Stage1';
 import Stage2 from './Stage2';
 import Stage3 from './Stage3';
 import './ChatInterface.css';
 
+const remarkPlugins = [remarkGfm];
+
+// Only memoize user messages (they never change once sent)
+const UserMessage = memo(function UserMessage({ content }) {
+  return (
+    <div className="message-group">
+      <div className="user-message">
+        <div className="message-label">You</div>
+        <div className="message-content">
+          <div className="markdown-content">
+            <ReactMarkdown remarkPlugins={remarkPlugins}>{content}</ReactMarkdown>
+          </div>
+        </div>
+      </div>
+    </div>
+  );
+});
+
+// Memoize completed assistant messages, but skip memo for the active (last) one
+const AssistantMessage = memo(function AssistantMessage({ msg, isActive }) {
+  return (
+    <div className="message-group">
+      <div className="assistant-message">
+        <div className="message-label">LLM Council</div>
+
+        {/* Stage 1 */}
+        {msg.loading?.stage1 && (
+          <div className="stage-loading">
+            <div className="spinner"></div>
+            <span>Running Stage 1: Collecting individual responses...</span>
+          </div>
+        )}
+        {msg.stage1 && <Stage1 responses={msg.stage1} />}
+
+        {/* Stage 2 */}
+        {msg.loading?.stage2 && (
+          <div className="stage-loading">
+            <div className="spinner"></div>
+            <span>Running Stage 2: Peer rankings...</span>
+          </div>
+        )}
+        {msg.stage2 && (
+          <Stage2
+            rankings={msg.stage2}
+            labelToModel={msg.metadata?.label_to_model}
+            aggregateRankings={msg.metadata?.aggregate_rankings}
+          />
+        )}
+
+        {/* Stage 3 */}
+        {msg.loading?.stage3 && (
+          <div className="stage-loading">
+            <div className="spinner"></div>
+            <span>Running Stage 3: Final synthesis...</span>
+          </div>
+        )}
+        {msg.stage3 && <Stage3 finalResponse={msg.stage3} />}
+      </div>
+    </div>
+  );
+}, (prevProps, nextProps) => {
+  // If active (streaming), always re-render
+  if (prevProps.isActive || nextProps.isActive) return false;
+  // Otherwise skip re-render (completed messages don't change)
+  return true;
+});
+
 export default function ChatInterface({
   conversation,
   onSendMessage,
+  onStopGeneration,
   isLoading,
+  pendingInput,
+  onPendingInputConsumed,
 }) {
   const [input, setInput] = useState('');
+  const textareaRef = useRef(null);
   const messagesEndRef = useRef(null);
 
-  const scrollToBottom = () => {
+  useEffect(() => {
     messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' });
-  };
+  }, [conversation, isLoading]);
 
+  // Recover input from stopped generation
   useEffect(() => {
-    scrollToBottom();
-  }, [conversation]);
+    if (pendingInput !== null) {
+      setInput(pendingInput);
+      onPendingInputConsumed();
+      setTimeout(() => textareaRef.current?.focus(), 0);
+    }
+  }, [pendingInput]);
 
   const handleSubmit = (e) => {
     e.preventDefault();
@@ -30,7 +107,6 @@ export default function ChatInterface({
   };
 
   const handleKeyDown = (e) => {
-    // Submit on Enter (without Shift)
     if (e.key === 'Enter' && !e.shiftKey) {
       e.preventDefault();
       handleSubmit(e);
@@ -57,57 +133,13 @@ export default function ChatInterface({
             <p>Ask a question to consult the LLM Council</p>
           </div>
         ) : (
-          conversation.messages.map((msg, index) => (
-            <div key={index} className="message-group">
-              {msg.role === 'user' ? (
-                <div className="user-message">
-                  <div className="message-label">You</div>
-                  <div className="message-content">
-                    <div className="markdown-content">
-                      <ReactMarkdown>{msg.content}</ReactMarkdown>
-                    </div>
-                  </div>
-                </div>
-              ) : (
-                <div className="assistant-message">
-                  <div className="message-label">LLM Council</div>
-
-                  {/* Stage 1 */}
-                  {msg.loading?.stage1 && (
-                    <div className="stage-loading">
-                      <div className="spinner"></div>
-                      <span>Running Stage 1: Collecting individual responses...</span>
-                    </div>
-                  )}
-                  {msg.stage1 && <Stage1 responses={msg.stage1} />}
-
-                  {/* Stage 2 */}
-                  {msg.loading?.stage2 && (
-                    <div className="stage-loading">
-                      <div className="spinner"></div>
-                      <span>Running Stage 2: Peer rankings...</span>
-                    </div>
-                  )}
-                  {msg.stage2 && (
-                    <Stage2
-                      rankings={msg.stage2}
-                      labelToModel={msg.metadata?.label_to_model}
-                      aggregateRankings={msg.metadata?.aggregate_rankings}
-                    />
-                  )}
-
-                  {/* Stage 3 */}
-                  {msg.loading?.stage3 && (
-                    <div className="stage-loading">
-                      <div className="spinner"></div>
-                      <span>Running Stage 3: Final synthesis...</span>
-                    </div>
-                  )}
-                  {msg.stage3 && <Stage3 finalResponse={msg.stage3} />}
-                </div>
-              )}
-            </div>
-          ))
+          conversation.messages.map((msg, index) => {
+            if (msg.role === 'user') {
+              return <UserMessage key={index} content={msg.content} />;
+            }
+            const isLastAssistant = isLoading && index === conversation.messages.length - 1;
+            return <AssistantMessage key={index} msg={msg} isActive={isLastAssistant} />;
+          })
         )}
 
         {isLoading && (
@@ -120,9 +152,9 @@ export default function ChatInterface({
         <div ref={messagesEndRef} />
       </div>
 
-      {conversation.messages.length === 0 && (
-        <form className="input-form" onSubmit={handleSubmit}>
+      <form className="input-form" onSubmit={handleSubmit}>
           <textarea
+            ref={textareaRef}
            className="message-input"
            placeholder="Ask your question... (Shift+Enter for new line, Enter to send)"
            value={input}
@@ -131,15 +163,24 @@ export default function ChatInterface({
            disabled={isLoading}
            rows={3}
          />
-         <button
-           type="submit"
-           className="send-button"
-           disabled={!input.trim() || isLoading}
-         >
-           Send
-         </button>
+         {isLoading ? (
+           <button
+             type="button"
+             className="stop-button"
+             onClick={onStopGeneration}
+           >
+             Stop
+           </button>
+         ) : (
+           <button
+             type="submit"
+             className="send-button"
+             disabled={!input.trim()}
+           >
+             Send
+           </button>
+         )}
        </form>
-      )}
     </div>
   );
 }
