import React, { useState, useEffect, useRef, useMemo } from 'react'; import { useReactFlow } from 'reactflow'; import useFlowStore from '../store/flowStore'; import { useAuthStore } from '../store/authStore'; import type { NodeData, Trace, Message, MergedTrace, MergeStrategy, CouncilData, CouncilMemberConfig, DebateData, DebateRound } from '../store/flowStore'; import type { Edge } from 'reactflow'; import ReactMarkdown from 'react-markdown'; import remarkGfm from 'remark-gfm'; import remarkMath from 'remark-math'; import rehypeKatex from 'rehype-katex'; import 'katex/dist/katex.min.css'; // Preprocess LaTeX delimiters: convert \(...\) → $...$ and \[...\] → $$...$$ const preprocessLaTeX = (content: string): string => { return content .replace(/\\\[([\s\S]*?)\\\]/g, (_, math) => `$$${math}$$`) .replace(/\\\(([\s\S]*?)\\\)/g, (_, math) => `$${math}$`); }; import { Play, Settings, Info, ChevronLeft, ChevronRight, ChevronDown, Maximize2, Edit3, X, Check, FileText, MessageCircle, Send, GripVertical, GitMerge, Trash2, AlertCircle, Loader2, Navigation, Upload, Search, Link, Layers, Eye, EyeOff, Copy, ClipboardCheck, Users, MessageSquare } from 'lucide-react'; interface SidebarProps { isOpen: boolean; onToggle: () => void; onInteract?: () => void; } const Sidebar: React.FC = ({ isOpen, onToggle, onInteract }) => { const { nodes, edges, selectedNodeId, updateNodeData, getActiveContext, addNode, setSelectedNode, isTraceComplete, theme, createMergedTrace, updateMergedTrace, deleteMergedTrace, unfoldMergedTrace, computeMergedMessages, files, uploadFile, refreshFiles, addFileScope, removeFileScope, currentBlueprintPath, saveCurrentBlueprint } = useFlowStore(); const { getAuthHeader, user } = useAuthStore(); const { setCenter, getViewport } = useReactFlow(); const isDark = theme === 'dark'; // Premium models and authorized users const PREMIUM_USERS = ['test', 'blackhao']; const canUsePremiumModels = user?.username ? 
PREMIUM_USERS.includes(user.username) : false;
  // Premium models: gpt-5-pro, gpt-5.2-pro, o3

  // NOTE(review): several useState/useRef type arguments appear to have been
  // stripped from this file (e.g. `useState(null)` for id-like state,
  // bare `useRef(null)` for element refs). Restore the generics (e.g.
  // `useState<string | null>(null)`) from version control — TODO confirm.

  // ---- Top-level UI state ----
  const [activeTab, setActiveTab] = useState<'interact' | 'settings' | 'debug'>('interact');
  const [streamBuffer, setStreamBuffer] = useState('');
  const [streamingNodeId, setStreamingNodeId] = useState(null); // Track which node is streaming
  // Attachments state
  const [showAttachModal, setShowAttachModal] = useState(false);
  const [attachSearch, setAttachSearch] = useState('');
  const settingsUploadRef = useRef(null);
  const [settingsUploading, setSettingsUploading] = useState(false);
  // Response Modal & Edit states
  const [isModalOpen, setIsModalOpen] = useState(false);
  const [isEditing, setIsEditing] = useState(false);
  const [editedResponse, setEditedResponse] = useState('');
  const [rawTextMode, setRawTextMode] = useState(false);
  const [copiedResponse, setCopiedResponse] = useState(false);
  // Summary states
  const [showSummaryModal, setShowSummaryModal] = useState(false);
  const [summaryModel, setSummaryModel] = useState('gpt-5-nano');
  const [isSummarizing, setIsSummarizing] = useState(false);
  // Quick Chat states
  const [quickChatOpen, setQuickChatOpen] = useState(false);
  const [quickChatTrace, setQuickChatTrace] = useState(null);
  const [quickChatLastNodeId, setQuickChatLastNodeId] = useState(null); // Track the last node in the chat chain
  const [quickChatMessages, setQuickChatMessages] = useState([]);
  const [quickChatInput, setQuickChatInput] = useState('');
  const [quickChatModel, setQuickChatModel] = useState('gpt-5.1');
  const [quickChatLoading, setQuickChatLoading] = useState(false);
  const [quickChatTemp, setQuickChatTemp] = useState(0.7);
  const [quickChatEffort, setQuickChatEffort] = useState<'low' | 'medium' | 'high'>('medium');
  // presumably set when the chat must fork/duplicate the node before sending — TODO confirm
  const [quickChatNeedsDuplicate, setQuickChatNeedsDuplicate] = useState(false);
  const [quickChatWebSearch, setQuickChatWebSearch] = useState(true);
  const [quickChatAttachedFiles, setQuickChatAttachedFiles] = useState([]); // File IDs for current message
  const [quickChatSentFiles, setQuickChatSentFiles] = useState<{msgId: string, fileIds: string[]}[]>([]); // Files sent with messages
  const [showQuickChatAttachModal, setShowQuickChatAttachModal] = useState(false);
  const [quickChatAttachSearch, setQuickChatAttachSearch] = useState('');
  const [quickChatUploading, setQuickChatUploading] = useState(false); // Upload loading state
  const quickChatEndRef = useRef(null);
  const quickChatInputRef = useRef(null);
  const quickChatUploadRef = useRef(null);
  // Merge Trace states
  const [showMergeModal, setShowMergeModal] = useState(false);
  const [mergeSelectedIds, setMergeSelectedIds] = useState([]);
  const [mergeStrategy, setMergeStrategy] = useState('query_time');
  const [mergeDraggedId, setMergeDraggedId] = useState(null);
  // explicit display/merge order of trace ids, reorderable via drag-and-drop
  const [mergeOrder, setMergeOrder] = useState([]);
  const [showMergePreview, setShowMergePreview] = useState(false);
  const [isSummarizingMerge, setIsSummarizingMerge] = useState(false);
  // Council mode states
  const [councilStage, setCouncilStage] = useState('');
  const [councilStreamBuffer, setCouncilStreamBuffer] = useState('');
  const [openCtxDropdown, setOpenCtxDropdown] = useState(null);
  const [councilTab, setCouncilTab] = useState<'final' | 'responses' | 'rankings'>('final');
  const [councilResponseTab, setCouncilResponseTab] = useState(0);
  const [councilRankingTab, setCouncilRankingTab] = useState<'aggregate' | number>('aggregate');
  // Quick Chat council mode states
  const [quickChatCouncilMode, setQuickChatCouncilMode] = useState(false);
  const [quickChatCouncilModels, setQuickChatCouncilModels] = useState([]);
  const [quickChatChairmanModel, setQuickChatChairmanModel] = useState(null);
  const [quickChatCouncilStage, setQuickChatCouncilStage] = useState('');
  const [quickChatCouncilData, setQuickChatCouncilData] = useState(null);
  const [quickChatCouncilConfigOpen, setQuickChatCouncilConfigOpen] = useState(false);
  // Debate mode states
  const [debateStage, setDebateStage] = useState('');
  const
[debateStreamBuffer, setDebateStreamBuffer] = useState(''); const [debateTab, setDebateTab] = useState<'final' | 'timeline' | 'per-model'>('timeline'); const [debateTimelineExpandedRounds, setDebateTimelineExpandedRounds] = useState>(new Set()); const [debatePerModelSelected, setDebatePerModelSelected] = useState(''); const selectedNode = nodes.find((n) => n.id === selectedNodeId); // Reset stream buffer and modal states when node changes useEffect(() => { setStreamBuffer(''); setIsModalOpen(false); setIsEditing(false); setShowMergeModal(false); setMergeSelectedIds([]); setShowMergePreview(false); }, [selectedNodeId]); // Default select first trace when node changes and no trace is selected useEffect(() => { if (selectedNode && selectedNode.data.traces && selectedNode.data.traces.length > 0 && (!selectedNode.data.activeTraceIds || selectedNode.data.activeTraceIds.length === 0)) { updateNodeData(selectedNode.id, { activeTraceIds: [selectedNode.data.traces[0].id] }); } }, [selectedNodeId, selectedNode?.data.traces?.length]); // Sync editedResponse when entering edit mode useEffect(() => { if (isEditing && selectedNode) { setEditedResponse(selectedNode.data.response || ''); } }, [isEditing, selectedNode?.data.response]); // Scroll to bottom when quick chat messages change useEffect(() => { if (quickChatEndRef.current) { quickChatEndRef.current.scrollIntoView({ behavior: 'smooth' }); } }, [quickChatMessages]); // Attachment helpers const handleAttach = async (fileId: string) => { if (!selectedNode) return; const current = selectedNode.data.attachedFileIds || []; if (!current.includes(fileId)) { updateNodeData(selectedNode.id, { attachedFileIds: [...current, fileId] }); // Add scope to file for filtering const projectPath = currentBlueprintPath || 'untitled'; const scope = `${projectPath}/${selectedNode.id}`; try { await addFileScope(fileId, scope); } catch (e) { console.error('Failed to add file scope:', e); } // Auto-save blueprint to persist attached files if 
(currentBlueprintPath) { saveCurrentBlueprint(currentBlueprintPath, getViewport()).catch(console.error); } } setShowAttachModal(false); }; const handleDetach = async (fileId: string) => { if (!selectedNode) return; const current = selectedNode.data.attachedFileIds || []; updateNodeData(selectedNode.id, { attachedFileIds: current.filter(id => id !== fileId) }); // Remove scope from file const projectPath = currentBlueprintPath || 'untitled'; const scope = `${projectPath}/${selectedNode.id}`; try { await removeFileScope(fileId, scope); } catch (e) { console.error('Failed to remove file scope:', e); } // Auto-save blueprint to persist detached files if (currentBlueprintPath) { saveCurrentBlueprint(currentBlueprintPath, getViewport()).catch(console.error); } }; const handleUploadAndAttach = async (e: React.ChangeEvent) => { if (!e.target.files || e.target.files.length === 0 || !selectedNode) return; const file = e.target.files[0]; setSettingsUploading(true); try { const meta = await uploadFile(file, { provider: 'local' }); handleAttach(meta.id); } catch (err) { alert(`Upload failed: ${(err as Error).message}`); } finally { e.target.value = ''; setSettingsUploading(false); } }; // Image helpers const isImageFile = (mime: string) => ['image/jpeg', 'image/png', 'image/gif', 'image/webp'].includes(mime); const getImageUrl = (fileId: string) => `${import.meta.env.VITE_BACKEND_URL || ''}/api/files/download?user=${encodeURIComponent(user?.username || 'test')}&file_id=${encodeURIComponent(fileId)}`; // Paste handler: upload pasted image and attach it const handlePasteImage = async ( e: React.ClipboardEvent, addFile: (fileId: string) => void, scopeFn?: () => string, ) => { const items = e.clipboardData?.items; if (!items) return; for (let i = 0; i < items.length; i++) { const item = items[i]; if (item.type.startsWith('image/')) { e.preventDefault(); const blob = item.getAsFile(); if (!blob) continue; const file = new File([blob], `paste-${Date.now()}.${blob.type.split('/')[1] 
|| 'png'}`, { type: blob.type }); try { const meta = await uploadFile(file, { provider: 'local' }); addFile(meta.id); if (scopeFn) { try { await addFileScope(meta.id, scopeFn()); } catch {} } } catch (err) { console.error('Paste upload failed:', err); } return; // only handle first image } } }; // Filter files for attach modal const filteredFilesToAttach = useMemo(() => { const q = attachSearch.trim().toLowerCase(); if (!q) return files; return files.filter(f => f.name.toLowerCase().includes(q)); }, [files, attachSearch]); // Filter files for Quick Chat attach modal const filteredQuickChatFiles = useMemo(() => { const q = quickChatAttachSearch.trim().toLowerCase(); if (!q) return files; return files.filter(f => f.name.toLowerCase().includes(q)); }, [files, quickChatAttachSearch]); if (!isOpen) { return (
{selectedNode && (
{selectedNode.data.label}
)}
); } if (!selectedNode) { return (
Details

Select a node to edit

); }

  /**
   * Run the selected node against the backend streaming endpoint.
   * Captures the node id up-front so chunks arriving after a selection change
   * still update the node that started the run, streams the body into both a
   * local buffer (for live display) and `fullResponse`, then writes the final
   * transcript, timestamps and status back onto the node.
   */
  const handleRun = async () => {
    if (!selectedNode) return;
    // Check if upstream is complete before running
    const tracesCheck = checkActiveTracesComplete();
    if (!tracesCheck.complete) { console.warn('Cannot run: upstream context is incomplete'); return; }
    // Capture the node ID at the start of the request
    const runningNodeId = selectedNode.id;
    const runningPrompt = selectedNode.data.userPrompt;
    // Record query sent timestamp
    const querySentAt = Date.now();
    updateNodeData(runningNodeId, { status: 'loading', response: '', querySentAt });
    setStreamBuffer('');
    setStreamingNodeId(runningNodeId);
    // Use getActiveContext which respects the user's selected traces
    const context = getActiveContext(runningNodeId);
    // Calculate scopes: all nodes in the current trace path
    const projectPath = currentBlueprintPath || 'untitled';
    // Compute all upstream node IDs by traversing edges backward (BFS over incoming edges)
    const traceNodeIds = new Set();
    traceNodeIds.add(runningNodeId);
    const visited = new Set();
    const queue = [runningNodeId];
    while (queue.length > 0) {
      const currentNodeId = queue.shift()!;
      if (visited.has(currentNodeId)) continue;
      visited.add(currentNodeId);
      // Find all incoming edges to this node
      const incomingEdges = edges.filter(e => e.target === currentNodeId);
      for (const edge of incomingEdges) {
        const sourceNodeId = edge.source;
        if (!visited.has(sourceNodeId)) { traceNodeIds.add(sourceNodeId); queue.push(sourceNodeId); }
      }
    }
    // Build scopes for all nodes in the trace path (used for file_search filtering)
    const scopes = Array.from(traceNodeIds).map(nodeId => `${projectPath}/${nodeId}`);
    console.log('[file_search] trace scopes:', scopes);
    // If no prompt but has files, use a default prompt
    const attachedFiles = selectedNode.data.attachedFileIds || [];
    const effectivePrompt = runningPrompt?.trim()
      ? runningPrompt
      : attachedFiles.length > 0
        ? 'Please analyze the attached files.'
        : '';
    try {
      const response = await fetch(`/api/run_node_stream?user=${encodeURIComponent(user?.username || 'test')}`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json', ...getAuthHeader() },
        body: JSON.stringify({
          node_id: runningNodeId,
          incoming_contexts: [{ messages: context }],
          user_prompt: effectivePrompt,
          attached_file_ids: attachedFiles,
          scopes,
          merge_strategy: selectedNode.data.mergeStrategy || 'smart',
          config: {
            // Provider routed by model name; 'o3' is treated as an OpenAI model
            provider: selectedNode.data.model.includes('claude') ? 'claude' : (selectedNode.data.model.includes('gpt') || selectedNode.data.model === 'o3') ? 'openai' : 'google',
            model_name: selectedNode.data.model,
            temperature: selectedNode.data.temperature,
            system_prompt: selectedNode.data.systemPrompt,
            api_key: selectedNode.data.apiKey,
            enable_google_search: selectedNode.data.enableGoogleSearch !== false,
            reasoning_effort: selectedNode.data.reasoningEffort || 'medium',
          }
        })
      });
      if (!response.body) return;
      // Stream the raw body; chunks are plain text appended as they arrive.
      const reader = response.body.getReader();
      const decoder = new TextDecoder();
      let fullResponse = '';
      while (true) {
        const { value, done } = await reader.read();
        if (done) { console.log('[stream] done, fullResponse length:', fullResponse.length); break; }
        const chunk = decoder.decode(value);
        console.log('[stream] received chunk:', chunk.substring(0, 50));
        fullResponse += chunk;
        // Only update stream buffer, the display logic will check streamingNodeId
        setStreamBuffer(prev => prev + chunk);
      }
      // Update final state using captured nodeId
      const newUserMsg = { id: `msg_${Date.now()}_u`, role: 'user', content: runningPrompt };
      const newAssistantMsg = { id: `msg_${Date.now()}_a`, role: 'assistant', content: fullResponse };
      const responseReceivedAt = Date.now();
      updateNodeData(runningNodeId, { status: 'success', response: fullResponse, responseReceivedAt, messages: [...context, newUserMsg, newAssistantMsg] as any });
      // Auto-generate title
      generateTitle(runningNodeId, runningPrompt, fullResponse);
    } catch (error) { console.error(error);
updateNodeData(runningNodeId, { status: 'error' }); } finally { setStreamingNodeId(prev => prev === runningNodeId ? null : prev); } }; // Council mode: 3-stage LLM council execution const handleRunCouncil = async () => { if (!selectedNode) return; const councilModels: CouncilMemberConfig[] = selectedNode.data.councilModels || []; const chairmanConfig: CouncilMemberConfig = selectedNode.data.chairmanModel || councilModels[0]; if (councilModels.length < 2) return; const tracesCheck = checkActiveTracesComplete(); if (!tracesCheck.complete) return; const runningNodeId = selectedNode.id; const runningPrompt = selectedNode.data.userPrompt; const querySentAt = Date.now(); updateNodeData(runningNodeId, { status: 'loading', response: '', querySentAt, councilData: { stage1: null, stage2: null, stage3: null }, }); setStreamBuffer(''); setCouncilStreamBuffer(''); setCouncilStage('Starting council...'); setStreamingNodeId(runningNodeId); const context = getActiveContext(runningNodeId); const projectPath = currentBlueprintPath || 'untitled'; const traceNodeIds = new Set(); traceNodeIds.add(runningNodeId); const visited = new Set(); const queue = [runningNodeId]; while (queue.length > 0) { const currentNodeId = queue.shift()!; if (visited.has(currentNodeId)) continue; visited.add(currentNodeId); const incomingEdges = edges.filter(e => e.target === currentNodeId); for (const edge of incomingEdges) { if (!visited.has(edge.source)) { traceNodeIds.add(edge.source); queue.push(edge.source); } } } const scopes = Array.from(traceNodeIds).map(nodeId => `${projectPath}/${nodeId}`); const attachedFiles = selectedNode.data.attachedFileIds || []; const effectivePrompt = runningPrompt?.trim() ? runningPrompt : attachedFiles.length > 0 ? 'Please analyze the attached files.' 
: ''; // Resolve context messages for a specific trace ID const resolveTraceContext = (traceId: string): Message[] => { const node = nodes.find(n => n.id === runningNodeId); if (!node) return []; // Search incoming traces let trace: Trace | undefined = (node.data.traces || []).find((t: Trace) => t.id === traceId); // Then outgoing traces if (!trace) trace = (node.data.outgoingTraces || []).find((t: Trace) => t.id === traceId); if (trace) { const nodePrefix = `${runningNodeId}-`; const isOriginated = trace.id === `trace-${runningNodeId}` || trace.id.startsWith('fork-') || (trace.id.startsWith('prepend-') && trace.id.includes(`-from-${runningNodeId}`)); return isOriginated ? trace.messages.filter(m => !m.id?.startsWith(nodePrefix)) : [...trace.messages]; } // Check merged traces const merged = (node.data.mergedTraces || []).find((m: MergedTrace) => m.id === traceId); if (merged) return [...merged.messages]; return []; }; try { const response = await fetch(`/api/run_council_stream?user=${encodeURIComponent(user?.username || 'test')}`, { method: 'POST', headers: { 'Content-Type': 'application/json', ...getAuthHeader() }, body: JSON.stringify({ node_id: runningNodeId, incoming_contexts: [{ messages: context }], user_prompt: effectivePrompt, council_models: councilModels.map((cfg) => { const base: Record = { model_name: cfg.model, temperature: cfg.temperature ?? null, reasoning_effort: cfg.reasoningEffort ?? null, enable_google_search: cfg.enableWebSearch ?? null, }; if (cfg.traceId) { base.incoming_contexts = [{ messages: resolveTraceContext(cfg.traceId) }]; } return base; }), chairman_model: { model_name: chairmanConfig.model, temperature: chairmanConfig.temperature ?? null, reasoning_effort: chairmanConfig.reasoningEffort ?? null, enable_google_search: chairmanConfig.enableWebSearch ?? 
null, }, system_prompt: selectedNode.data.systemPrompt || null, temperature: selectedNode.data.temperature, reasoning_effort: selectedNode.data.reasoningEffort || 'medium', enable_google_search: selectedNode.data.enableGoogleSearch !== false, merge_strategy: selectedNode.data.mergeStrategy || 'smart', attached_file_ids: attachedFiles, scopes, }), }); if (!response.body) return; const reader = response.body.getReader(); const decoder = new TextDecoder(); let sseBuffer = ''; let stage1Results: Array<{ model: string; response: string }> = []; let stage2Data: any = null; let stage3Full = ''; let stage3Model = ''; while (true) { const { value, done } = await reader.read(); if (done) break; sseBuffer += decoder.decode(value, { stream: true }); // Parse SSE events (data: {...}\n\n) const parts = sseBuffer.split('\n\n'); sseBuffer = parts.pop() || ''; for (const part of parts) { const line = part.trim(); if (!line.startsWith('data: ')) continue; let evt: any; try { evt = JSON.parse(line.slice(6)); } catch { continue; } switch (evt.type) { case 'stage1_start': setCouncilStage('Stage 1: Collecting responses...'); break; case 'stage1_model_complete': stage1Results = [...stage1Results, evt.data]; setCouncilStage(`Stage 1: ${stage1Results.length}/${councilModels.length} models done`); updateNodeData(runningNodeId, { councilData: { stage1: [...stage1Results], stage2: null, stage3: null }, }); break; case 'stage1_complete': stage1Results = evt.data; updateNodeData(runningNodeId, { councilData: { stage1: stage1Results, stage2: null, stage3: null }, }); break; case 'stage2_start': setCouncilStage('Stage 2: Peer ranking...'); break; case 'stage2_complete': stage2Data = evt.data; updateNodeData(runningNodeId, { councilData: { stage1: stage1Results, stage2: stage2Data, stage3: null }, }); break; case 'stage3_start': setCouncilStage('Stage 3: Chairman synthesizing...'); setCouncilStreamBuffer(''); break; case 'stage3_chunk': stage3Full += evt.data.chunk; 
setCouncilStreamBuffer(stage3Full); setStreamBuffer(stage3Full); break; case 'stage3_complete': stage3Model = evt.data.model; stage3Full = evt.data.response; break; case 'complete': { const responseReceivedAt = Date.now(); const councilData: CouncilData = { stage1: stage1Results, stage2: stage2Data, stage3: { model: stage3Model, response: stage3Full }, }; const newUserMsg = { id: `msg_${Date.now()}_u`, role: 'user', content: runningPrompt }; const newAssistantMsg = { id: `msg_${Date.now()}_a`, role: 'assistant', content: stage3Full }; updateNodeData(runningNodeId, { status: 'success', response: stage3Full, responseReceivedAt, councilData, messages: [...context, newUserMsg, newAssistantMsg] as any, }); setCouncilStage(''); generateTitle(runningNodeId, runningPrompt, stage3Full); break; } case 'error': updateNodeData(runningNodeId, { status: 'error' }); setCouncilStage(''); break; } } } } catch (error) { console.error(error); updateNodeData(runningNodeId, { status: 'error' }); setCouncilStage(''); } finally { setStreamingNodeId(prev => prev === runningNodeId ? 
null : prev); } }; // Debate mode: multi-round LLM debate execution const handleRunDebate = async () => { if (!selectedNode) return; const debateModels: CouncilMemberConfig[] = selectedNode.data.debateModels || []; if (debateModels.length < 2) return; const tracesCheck = checkActiveTracesComplete(); if (!tracesCheck.complete) return; const runningNodeId = selectedNode.id; const runningPrompt = selectedNode.data.userPrompt; const querySentAt = Date.now(); const judgeMode = selectedNode.data.debateJudgeMode || 'external_judge'; const debateFormat = selectedNode.data.debateFormat || 'free_discussion'; const maxRounds = selectedNode.data.debateMaxRounds || 5; updateNodeData(runningNodeId, { status: 'loading', response: '', querySentAt, debateData: { rounds: [], finalVerdict: null, config: { judgeMode, format: debateFormat, maxRounds }, }, }); setStreamBuffer(''); setDebateStreamBuffer(''); setDebateStage('Starting debate...'); setStreamingNodeId(runningNodeId); const context = getActiveContext(runningNodeId); const projectPath = currentBlueprintPath || 'untitled'; const traceNodeIds = new Set(); traceNodeIds.add(runningNodeId); const visited = new Set(); const queue = [runningNodeId]; while (queue.length > 0) { const currentNodeId = queue.shift()!; if (visited.has(currentNodeId)) continue; visited.add(currentNodeId); const incomingEdges = edges.filter(e => e.target === currentNodeId); for (const edge of incomingEdges) { if (!visited.has(edge.source)) { traceNodeIds.add(edge.source); queue.push(edge.source); } } } const scopes = Array.from(traceNodeIds).map(nodeId => `${projectPath}/${nodeId}`); const attachedFiles = selectedNode.data.attachedFileIds || []; const effectivePrompt = runningPrompt?.trim() ? runningPrompt : attachedFiles.length > 0 ? 'Please analyze the attached files.' 
: ''; try { const judgeModelConfig = selectedNode.data.judgeModel || debateModels[0]; const response = await fetch(`/api/run_debate_stream?user=${encodeURIComponent(user?.username || 'test')}`, { method: 'POST', headers: { 'Content-Type': 'application/json', ...getAuthHeader() }, body: JSON.stringify({ node_id: runningNodeId, incoming_contexts: [{ messages: context }], user_prompt: effectivePrompt, debate_models: debateModels.map((cfg) => ({ model_name: cfg.model, temperature: cfg.temperature ?? null, reasoning_effort: cfg.reasoningEffort ?? null, enable_google_search: cfg.enableWebSearch ?? null, })), judge_model: judgeMode === 'external_judge' ? { model_name: judgeModelConfig.model, temperature: judgeModelConfig.temperature ?? null, reasoning_effort: judgeModelConfig.reasoningEffort ?? null, enable_google_search: judgeModelConfig.enableWebSearch ?? null, } : null, judge_mode: judgeMode, debate_format: debateFormat, custom_format_prompt: selectedNode.data.debateCustomPrompt || null, max_rounds: maxRounds, system_prompt: selectedNode.data.systemPrompt || null, temperature: selectedNode.data.temperature, reasoning_effort: selectedNode.data.reasoningEffort || 'medium', enable_google_search: selectedNode.data.enableGoogleSearch !== false, merge_strategy: selectedNode.data.mergeStrategy || 'smart', attached_file_ids: attachedFiles, scopes, }), }); if (!response.body) return; const reader = response.body.getReader(); const decoder = new TextDecoder(); let sseBuffer = ''; const debateRounds: DebateRound[] = []; let currentRound = 0; let currentRoundResponses: Array<{ model: string; response: string }> = []; let finalModel = ''; let finalFull = ''; while (true) { const { value, done } = await reader.read(); if (done) break; sseBuffer += decoder.decode(value, { stream: true }); const parts = sseBuffer.split('\n\n'); sseBuffer = parts.pop() || ''; for (const part of parts) { const line = part.trim(); if (!line.startsWith('data: ')) continue; let evt: any; try { evt = 
JSON.parse(line.slice(6)); } catch { continue; } switch (evt.type) { case 'debate_start': setDebateStage(`Debate started (${evt.data.models.length} models, max ${evt.data.max_rounds} rounds)`); break; case 'round_start': currentRound = evt.data.round; currentRoundResponses = []; setDebateStage(`Round ${currentRound}/${maxRounds}: Collecting responses...`); break; case 'round_model_complete': currentRoundResponses = [...currentRoundResponses, { model: evt.data.model, response: evt.data.response }]; setDebateStage(`Round ${currentRound}/${maxRounds}: ${currentRoundResponses.length}/${debateModels.length} models done`); break; case 'round_complete': { const roundData: DebateRound = { round: evt.data.round, responses: evt.data.responses }; debateRounds.push(roundData); updateNodeData(runningNodeId, { debateData: { rounds: [...debateRounds], finalVerdict: null, config: { judgeMode, format: debateFormat, maxRounds }, }, }); break; } case 'judge_decision': { const lastRound = debateRounds[debateRounds.length - 1]; if (lastRound) { lastRound.judgeDecision = { continue: evt.data.continue, reasoning: evt.data.reasoning }; updateNodeData(runningNodeId, { debateData: { rounds: [...debateRounds], finalVerdict: null, config: { judgeMode, format: debateFormat, maxRounds }, }, }); } if (!evt.data.continue) { setDebateStage('Judge stopped debate. 
Generating final verdict...'); } else { setDebateStage(`Judge: Continue to round ${currentRound + 1}...`); } break; } case 'convergence_check': { const lastRound2 = debateRounds[debateRounds.length - 1]; if (lastRound2) { lastRound2.converged = evt.data.converged; updateNodeData(runningNodeId, { debateData: { rounds: [...debateRounds], finalVerdict: null, config: { judgeMode, format: debateFormat, maxRounds }, }, }); } if (evt.data.converged) { setDebateStage('Consensus reached!'); } break; } case 'final_start': finalModel = evt.data.model; setDebateStage('Judge synthesizing final verdict...'); setDebateStreamBuffer(''); break; case 'final_chunk': finalFull += evt.data.chunk; setDebateStreamBuffer(finalFull); setStreamBuffer(finalFull); break; case 'final_complete': { finalModel = evt.data.model; finalFull = evt.data.response; const responseReceivedAt = Date.now(); const debateData: DebateData = { rounds: debateRounds, finalVerdict: { model: finalModel, response: finalFull }, config: { judgeMode, format: debateFormat, maxRounds }, }; const newUserMsg = { id: `msg_${Date.now()}_u`, role: 'user', content: runningPrompt }; const newAssistantMsg = { id: `msg_${Date.now()}_a`, role: 'assistant', content: finalFull }; updateNodeData(runningNodeId, { status: 'success', response: finalFull, responseReceivedAt, debateData, messages: [...context, newUserMsg, newAssistantMsg] as any, }); setDebateStage(''); generateTitle(runningNodeId, runningPrompt, finalFull); break; } case 'debate_complete': { // If no final verdict (display_only or self_convergence without explicit final_complete) const currentNode = nodes.find(n => n.id === runningNodeId); if (currentNode?.data.status === 'loading') { const responseReceivedAt = Date.now(); const lastRoundResp = debateRounds.length > 0 ? debateRounds[debateRounds.length - 1].responses : []; const bestResponse = lastRoundResp.length > 0 ? lastRoundResp.reduce((a, b) => a.response.length > b.response.length ? 
a : b).response : ''; const debateData: DebateData = { rounds: debateRounds, finalVerdict: finalFull ? { model: finalModel, response: finalFull } : null, config: { judgeMode, format: debateFormat, maxRounds }, }; const displayResponse = finalFull || bestResponse; const newUserMsg = { id: `msg_${Date.now()}_u`, role: 'user', content: runningPrompt }; const newAssistantMsg = { id: `msg_${Date.now()}_a`, role: 'assistant', content: displayResponse }; updateNodeData(runningNodeId, { status: 'success', response: displayResponse, responseReceivedAt, debateData, messages: [...context, newUserMsg, newAssistantMsg] as any, }); setDebateStage(''); if (displayResponse) generateTitle(runningNodeId, runningPrompt, displayResponse); } break; } case 'error': updateNodeData(runningNodeId, { status: 'error' }); setDebateStage(''); break; } } } } catch (error) { console.error(error); updateNodeData(runningNodeId, { status: 'error' }); setDebateStage(''); } finally { setStreamingNodeId(prev => prev === runningNodeId ? 
null : prev); } };

  /** Generic field setter for the selected node's data. */
  const handleChange = (field: keyof NodeData, value: any) => { updateNodeData(selectedNode.id, { [field]: value }); };

  /** Persist the manually edited response text onto the node. */
  const handleSaveEdit = () => { if (!selectedNode) return; updateNodeData(selectedNode.id, { response: editedResponse }); setIsEditing(false); };

  /** Discard edits and restore the node's stored response. */
  const handleCancelEdit = () => { setIsEditing(false); setEditedResponse(selectedNode?.data.response || ''); };

  // Summarize response: replaces the node's response with a model-generated summary.
  const handleSummarize = async () => {
    if (!selectedNode?.data.response) return;
    setIsSummarizing(true);
    setShowSummaryModal(false);
    try {
      const res = await fetch(`/api/summarize?user=${encodeURIComponent(user?.username || 'test')}`, { method: 'POST', headers: { 'Content-Type': 'application/json', ...getAuthHeader() }, body: JSON.stringify({ content: selectedNode.data.response, model: summaryModel }) });
      if (res.ok) {
        const data = await res.json();
        if (data.summary) {
          // Replace response with summary
          updateNodeData(selectedNode.id, { response: data.summary });
        }
      }
    } catch (error) { console.error('Summarization failed:', error); } finally { setIsSummarizing(false); }
  };

  // Auto-generate title using gpt-5-nano
  const generateTitle = async (nodeId: string, userPrompt: string, response: string) => {
    try {
      const res = await fetch(`/api/generate_title?user=${encodeURIComponent(user?.username || 'test')}`, { method: 'POST', headers: { 'Content-Type': 'application/json', ...getAuthHeader() }, body: JSON.stringify({ user_prompt: userPrompt, response }) });
      if (res.ok) {
        const data = await res.json();
        if (data.title) { updateNodeData(nodeId, { label: data.title }); }
      }
    } catch (error) {
      console.error('Failed to generate title:', error);
      // Silently fail - keep the original title
    }
  };

  // Open merge modal, seeding the reorderable list with the node's trace ids.
  const openMergeModal = () => {
    if (!selectedNode?.data.traces) return;
    const traceIds = selectedNode.data.traces.map((t: Trace) => t.id);
    setMergeOrder(traceIds);
    setMergeSelectedIds([]);
    setShowMergePreview(false);
    setShowMergeModal(true);
  };

  // Drag-and-drop handlers for merge modal
  const handleMergeDragStart = (e: React.DragEvent, traceId: string) => { setMergeDraggedId(traceId); e.dataTransfer.effectAllowed = 'move'; };

  /** Reorders mergeOrder live as the dragged trace passes over another entry. */
  const handleMergeDragOver = (e: React.DragEvent, overTraceId: string) => {
    e.preventDefault();
    if (!mergeDraggedId || mergeDraggedId === overTraceId) return;
    const newOrder = [...mergeOrder];
    const draggedIndex = newOrder.indexOf(mergeDraggedId);
    const overIndex = newOrder.indexOf(overTraceId);
    if (draggedIndex !== -1 && overIndex !== -1) {
      newOrder.splice(draggedIndex, 1);
      newOrder.splice(overIndex, 0, mergeDraggedId);
      setMergeOrder(newOrder);
    }
  };

  const handleMergeDragEnd = () => { setMergeDraggedId(null); };

  // Toggle trace selection in merge modal
  const toggleMergeSelection = (traceId: string) => {
    setMergeSelectedIds(prev => {
      if (prev.includes(traceId)) { return prev.filter(id => id !== traceId); } else { return [...prev, traceId]; }
    });
  };

  // Create merged trace from the selected traces, honoring the drag order.
  // For the 'summary' strategy the merged content is summarized server-side
  // first, then stored on the newly created merged trace.
  const handleCreateMergedTrace = async () => {
    if (!selectedNode || mergeSelectedIds.length < 2) return;
    // Get the ordered trace IDs based on mergeOrder
    const orderedSelectedIds = mergeOrder.filter(id => mergeSelectedIds.includes(id));
    if (mergeStrategy === 'summary') {
      setIsSummarizingMerge(true);
      try {
        const messages = computeMergedMessages(selectedNode.id, orderedSelectedIds, 'trace_order');
        const content = messages.map(m => `${m.role}: ${m.content}`).join('\n\n');
        const res = await fetch(`/api/summarize?user=${encodeURIComponent(user?.username || 'test')}`, { method: 'POST', headers: { 'Content-Type': 'application/json', ...getAuthHeader() }, body: JSON.stringify({ content, model_name: 'gpt-5-nano', api_key: selectedNode.data.apiKey }) });
        if (res.ok) {
          const data = await res.json();
          const mergedId = createMergedTrace(selectedNode.id, orderedSelectedIds, 'summary');
          if (mergedId && data.summary) { updateMergedTrace(selectedNode.id, mergedId, { summarizedContent: data.summary }); }
        }
      } catch (error) { console.error('Failed to summarize for merge:', error); } finally { setIsSummarizingMerge(false); }
    } else {
      createMergedTrace(selectedNode.id, orderedSelectedIds, mergeStrategy);
    }
    // Close modal and reset
    setShowMergeModal(false);
    setMergeSelectedIds([]);
    setShowMergePreview(false);
  };

  // Get preview of merged messages
  const getMergePreview = () => {
    if (!selectedNode || mergeSelectedIds.length < 2) return [];
    const orderedSelectedIds = mergeOrder.filter(id => mergeSelectedIds.includes(id));
    return computeMergedMessages(selectedNode.id, orderedSelectedIds, mergeStrategy);
  };

  // Check if a trace has downstream nodes from the current selected node
  // NOTE(review): the _trace parameter is ignored — this only checks whether
  // ANY trace-handle edge leaves the node, not this specific trace; confirm intent.
  const traceHasDownstream = (_trace: Trace): boolean => {
    if (!selectedNode) return false;
    // Find edges going out from selectedNode that are part of this trace
    const outgoingEdge = edges.find(e => e.source === selectedNode.id && e.sourceHandle?.startsWith('trace-') );
    return !!outgoingEdge;
  };

  // Quick Chat functions
  const openQuickChat = (trace: Trace | null, isNewTrace: boolean = false) => {
    if (!selectedNode) return;
    onInteract?.(); // Close context menu when opening quick chat
    // Check if current node has a "sent" query (has response) or just unsent draft
    const hasResponse = !!selectedNode.data.response;
    const hasDraftPrompt = !!selectedNode.data.userPrompt && !hasResponse;
    // Helper to extract node ID from message ID (format: nodeId-u or nodeId-a)
    const getNodeIdFromMsgId = (msgId: string): string | null => {
      if (!msgId) return null;
      const parts = msgId.split('-');
      if (parts.length >= 2) {
        // Remove last part (-u or -a) and rejoin
        return parts.slice(0, -1).join('-');
      }
      return null;
    };
    // Helper to build sentFiles from messages
    const buildSentFilesFromMessages = (messages: Message[]): {msgId: string, fileIds: string[]}[] => {
      const sentFiles: {msgId: string, fileIds: string[]}[] = [];
      for (const msg of messages) {
        if (msg.role === 'user' && msg.id) {
          const nodeId = getNodeIdFromMsgId(msg.id);
          if (nodeId) {
            const node = nodes.find(n => n.id === nodeId);
            if
(node && node.data.attachedFileIds && node.data.attachedFileIds.length > 0) { sentFiles.push({ msgId: msg.id, fileIds: node.data.attachedFileIds }); } } } } return sentFiles; }; if (isNewTrace || !trace) { // Start a new trace from current node const initialMessages: Message[] = []; // Only include user prompt as message if it was actually sent (has response) if (selectedNode.data.userPrompt && hasResponse) { initialMessages.push({ id: `${selectedNode.id}-u`, role: 'user', content: selectedNode.data.userPrompt }); } if (selectedNode.data.response) { initialMessages.push({ id: `${selectedNode.id}-a`, role: 'assistant', content: selectedNode.data.response }); } setQuickChatTrace({ id: `new-trace-${selectedNode.id}`, sourceNodeId: selectedNode.id, color: '#888', messages: initialMessages }); setQuickChatMessages(initialMessages); setQuickChatSentFiles(buildSentFilesFromMessages(initialMessages)); setQuickChatNeedsDuplicate(false); setQuickChatLastNodeId(selectedNode.id); } else { // Use existing trace context const hasDownstream = traceHasDownstream(trace); setQuickChatNeedsDuplicate(hasDownstream); // Build full message history const fullMessages: Message[] = [...trace.messages]; // Only include current node's content if it was sent if (selectedNode.data.userPrompt && hasResponse) { fullMessages.push({ id: `${selectedNode.id}-u`, role: 'user', content: selectedNode.data.userPrompt }); } if (selectedNode.data.response) { fullMessages.push({ id: `${selectedNode.id}-a`, role: 'assistant', content: selectedNode.data.response }); } setQuickChatTrace({ ...trace, sourceNodeId: selectedNode.id, messages: fullMessages }); setQuickChatMessages(fullMessages); setQuickChatSentFiles(buildSentFilesFromMessages(fullMessages)); // Always set last node ID to current selected node // handleQuickChatSend will decide whether to overwrite (if empty) or create new node (if has response) setQuickChatLastNodeId(selectedNode.id); } // Copy council settings from node if active if 
(selectedNode.data.councilMode && selectedNode.data.councilModels && selectedNode.data.councilModels.length >= 2) { setQuickChatCouncilMode(true); setQuickChatCouncilModels([...selectedNode.data.councilModels]); setQuickChatChairmanModel(selectedNode.data.chairmanModel || selectedNode.data.councilModels[0]); setQuickChatCouncilConfigOpen(false); } else { setQuickChatCouncilMode(false); } setQuickChatCouncilData(null); setQuickChatCouncilStage(''); setQuickChatOpen(true); // If there's an unsent draft, put it in the input box setQuickChatInput(hasDraftPrompt ? selectedNode.data.userPrompt : ''); }; const closeQuickChat = () => { setQuickChatOpen(false); setQuickChatTrace(null); setQuickChatMessages([]); setQuickChatAttachedFiles([]); setQuickChatSentFiles([]); setQuickChatCouncilMode(false); setQuickChatCouncilData(null); setQuickChatCouncilStage(''); }; // Quick Chat file attachment helpers const getQuickChatScope = () => { const projectPath = currentBlueprintPath || 'untitled'; return `${projectPath}/quick_chat_temp`; }; const handleQuickChatAttach = async (fileId: string) => { if (!quickChatAttachedFiles.includes(fileId)) { setQuickChatAttachedFiles(prev => [...prev, fileId]); // Add scope to file for filtering try { await addFileScope(fileId, getQuickChatScope()); } catch (e) { console.error('Failed to add file scope:', e); } } setShowQuickChatAttachModal(false); }; const handleQuickChatDetach = async (fileId: string) => { setQuickChatAttachedFiles(prev => prev.filter(id => id !== fileId)); // Remove scope from file try { await removeFileScope(fileId, getQuickChatScope()); } catch (e) { console.error('Failed to remove file scope:', e); } }; const handleQuickChatUpload = async (e: React.ChangeEvent) => { if (!e.target.files || e.target.files.length === 0) return; const file = e.target.files[0]; setQuickChatUploading(true); try { const meta = await uploadFile(file, { provider: 'local' }); setQuickChatAttachedFiles(prev => [...prev, meta.id]); // Add scope to file 
for filtering try { await addFileScope(meta.id, getQuickChatScope()); } catch (e) { console.error('Failed to add file scope:', e); } } catch (err) { alert(`Upload failed: ${(err as Error).message}`); } finally { e.target.value = ''; setQuickChatUploading(false); } }; // Open Quick Chat for a merged trace const openMergedQuickChat = (merged: MergedTrace) => { if (!selectedNode) return; onInteract?.(); // Check if current node has a "sent" query (has response) or just unsent draft const hasResponse = !!selectedNode.data.response; const hasDraftPrompt = !!selectedNode.data.userPrompt && !hasResponse; // Helper to extract node ID from message ID (format: nodeId-u or nodeId-a) const getNodeIdFromMsgId = (msgId: string): string | null => { if (!msgId) return null; const parts = msgId.split('-'); if (parts.length >= 2) { return parts.slice(0, -1).join('-'); } return null; }; // Build messages from merged trace const fullMessages: Message[] = [...merged.messages]; // Only include current node's content if it was sent if (selectedNode.data.userPrompt && hasResponse) { fullMessages.push({ id: `${selectedNode.id}-u`, role: 'user', content: selectedNode.data.userPrompt }); } if (selectedNode.data.response) { fullMessages.push({ id: `${selectedNode.id}-a`, role: 'assistant', content: selectedNode.data.response }); } // Build sentFiles from messages const sentFiles: {msgId: string, fileIds: string[]}[] = []; for (const msg of fullMessages) { if (msg.role === 'user' && msg.id) { const nodeId = getNodeIdFromMsgId(msg.id); if (nodeId) { const node = nodes.find(n => n.id === nodeId); if (node && node.data.attachedFileIds && node.data.attachedFileIds.length > 0) { sentFiles.push({ msgId: msg.id, fileIds: node.data.attachedFileIds }); } } } } // Create a pseudo-trace for the merged context setQuickChatTrace({ id: merged.id, sourceNodeId: selectedNode.id, color: merged.colors[0] || '#888', messages: fullMessages }); setQuickChatMessages(fullMessages); setQuickChatSentFiles(sentFiles); 
setQuickChatNeedsDuplicate(false); // Merged traces don't duplicate setQuickChatOpen(true); // If there's an unsent draft, put it in the input box setQuickChatInput(hasDraftPrompt ? selectedNode.data.userPrompt : ''); }; // Check if a trace is complete (all upstream nodes have Q&A) const canQuickChat = (trace: Trace): boolean => { return isTraceComplete(trace); }; // Helper: Check if all upstream nodes have complete Q&A by traversing edges const checkUpstreamNodesComplete = (nodeId: string, visited: Set = new Set()): boolean => { if (visited.has(nodeId)) return true; // Avoid cycles visited.add(nodeId); const node = nodes.find(n => n.id === nodeId); if (!node) return true; // Find all incoming edges to this node const incomingEdges = edges.filter(e => e.target === nodeId); for (const edge of incomingEdges) { const sourceNode = nodes.find(n => n.id === edge.source); if (!sourceNode) continue; // Check if source node is disabled - skip disabled nodes if (sourceNode.data.disabled) continue; // Check if source node has complete Q&A if (!sourceNode.data.userPrompt || !sourceNode.data.response) { return false; // Found an incomplete upstream node } // Recursively check further upstream if (!checkUpstreamNodesComplete(edge.source, visited)) { return false; } } return true; }; // Helper: find incoming edge for a given trace ID (with fallbacks) const findIncomingEdgeForTrace = (nodeId: string, traceId: string): Edge | null => { // 1) exact match by sourceHandle let edge = edges.find(e => e.target === nodeId && e.sourceHandle === `trace-${traceId}`); if (edge) return edge; // 2) fallback: any incoming edge whose source has this trace in outgoingTraces edge = edges.find(e => { if (e.target !== nodeId) return false; const src = nodes.find(n => n.id === e.source); return src?.data.outgoingTraces?.some((t: Trace) => t.id === traceId); }); return edge || null; }; // Helper: get source trace IDs for a merged trace on a given node (supports propagated merged traces) const 
getMergedSourceIds = (nodeId: string, traceId: string): string[] => { const node = nodes.find(n => n.id === nodeId); if (!node) return []; const mergedLocal = node.data.mergedTraces?.find((m: MergedTrace) => m.id === traceId); if (mergedLocal) return mergedLocal.sourceTraceIds || []; const incomingMatch = node.data.traces?.find((t: Trace) => t.id === traceId); if (incomingMatch?.isMerged && incomingMatch.sourceTraceIds) return incomingMatch.sourceTraceIds; const outgoingMatch = node.data.outgoingTraces?.find((t: Trace) => t.id === traceId); if (outgoingMatch?.isMerged && outgoingMatch.sourceTraceIds) return outgoingMatch.sourceTraceIds; return []; }; // Recursive: Check if specific trace path upstream has complete nodes (supports multi-level merged) const checkTracePathComplete = ( nodeId: string, traceId: string, visited: Set = new Set() ): boolean => { const visitKey = `${nodeId}-${traceId}`; if (visited.has(visitKey)) return true; visited.add(visitKey); // Determine if this node is the merge owner or just receiving a propagated merged trace const localMerge = nodes.find(n => n.id === nodeId)?.data.mergedTraces?.some(m => m.id === traceId); const localParents = getMergedSourceIds(nodeId, traceId); const incomingEdge = findIncomingEdgeForTrace(nodeId, traceId); if (!incomingEdge) { // If no incoming edge and this node owns the merge, check parents from here if (localMerge && localParents.length > 0) { for (const pid of localParents) { if (!checkTracePathComplete(nodeId, pid, visited)) return false; } return true; } return true; // head } const sourceNode = nodes.find(n => n.id === incomingEdge.source); if (!sourceNode || sourceNode.data.disabled) return true; // If merged at sourceNode (or propagated merged), recurse into each parent from the merge owner const parentIds = localMerge ? localParents : getMergedSourceIds(sourceNode.id, traceId); if (parentIds.length > 0) { const mergeOwnerId = localMerge ? 
nodeId : sourceNode.id; for (const pid of parentIds) { if (!checkTracePathComplete(mergeOwnerId, pid, visited)) return false; } return true; } // Regular trace: check node content then continue upstream if (!sourceNode.data.userPrompt || !sourceNode.data.response) return false; return checkTracePathComplete(sourceNode.id, traceId, visited); }; // Recursive: Find the first empty node on a specific trace path (supports multi-level merged) const findEmptyNodeOnTrace = ( nodeId: string, traceId: string, visited: Set = new Set() ): string | null => { const visitKey = `${nodeId}-${traceId}`; if (visited.has(visitKey)) return null; visited.add(visitKey); // Determine if this node owns the merge or just receives propagated merged trace const localMerge = nodes.find(n => n.id === nodeId)?.data.mergedTraces?.some(m => m.id === traceId); const localParents = getMergedSourceIds(nodeId, traceId); const incomingEdge = findIncomingEdgeForTrace(nodeId, traceId); if (!incomingEdge) { if (localMerge && localParents.length > 0) { for (const pid of localParents) { const upstreamEmpty = findEmptyNodeOnTrace(nodeId, pid, visited); if (upstreamEmpty) return upstreamEmpty; } } return null; } const sourceNode = nodes.find(n => n.id === incomingEdge.source); if (!sourceNode || sourceNode.data.disabled) return null; const parentIds = localMerge ? localParents : getMergedSourceIds(sourceNode.id, traceId); if (parentIds.length > 0) { const mergeOwnerId = localMerge ? 
nodeId : sourceNode.id; for (const pid of parentIds) { const upstreamEmpty = findEmptyNodeOnTrace(mergeOwnerId, pid, visited); if (upstreamEmpty) return upstreamEmpty; } } if (!sourceNode.data.userPrompt || !sourceNode.data.response) { return sourceNode.id; } return findEmptyNodeOnTrace(sourceNode.id, traceId, visited); }; // Check if all active traces are complete (for main Run Node button) const checkActiveTracesComplete = (): { complete: boolean; incompleteTraceId?: string } => { if (!selectedNode) return { complete: true }; const activeTraceIds = selectedNode.data.activeTraceIds || []; if (activeTraceIds.length === 0) return { complete: true }; // Check upstream nodes ONLY for active traces (supports merged trace recursion) for (const traceId of activeTraceIds) { if (!checkTracePathComplete(selectedNode.id, traceId)) { return { complete: false, incompleteTraceId: 'upstream' }; } } // Check incoming traces content (message integrity) const incomingTraces = selectedNode.data.traces || []; for (const traceId of activeTraceIds) { const trace = incomingTraces.find((t: Trace) => t.id === traceId); if (trace && !isTraceComplete(trace)) { return { complete: false, incompleteTraceId: traceId }; } } // Check merged traces content (including propagated merged traces) for (const traceId of activeTraceIds) { const sourceIds = getMergedSourceIds(selectedNode.id, traceId); if (sourceIds.length > 0) { for (const sourceId of sourceIds) { const sourceTrace = incomingTraces.find((t: Trace) => t.id === sourceId); if (sourceTrace && !isTraceComplete(sourceTrace)) { return { complete: false, incompleteTraceId: sourceId }; } } } } return { complete: true }; }; // Navigate to an empty upstream node on the active traces const navigateToEmptyNode = () => { if (!selectedNode) return; const activeTraceIds = selectedNode.data.activeTraceIds || []; for (const traceId of activeTraceIds) { const emptyNodeId = findEmptyNodeOnTrace(selectedNode.id, traceId); if (emptyNodeId) { const emptyNode = 
nodes.find(n => n.id === emptyNodeId); if (emptyNode) { setCenter(emptyNode.position.x + 100, emptyNode.position.y + 50, { zoom: 1.2, duration: 500 }); setSelectedNode(emptyNodeId); return; // Found one, navigate and stop } } } }; const activeTracesCheck = selectedNode ? checkActiveTracesComplete() : { complete: true }; const handleQuickChatSend = async () => { // Allow send if there's text OR attached files const hasContent = quickChatInput.trim() || quickChatAttachedFiles.length > 0; if (!hasContent || !quickChatTrace || quickChatLoading || !selectedNode) return; const userInput = quickChatInput; const attachedFilesCopy = [...quickChatAttachedFiles]; const msgId = `qc_${Date.now()}_u`; const userMessage: Message = { id: msgId, role: 'user', content: userInput || '[Files attached]' }; // Track sent files for display if (attachedFilesCopy.length > 0) { setQuickChatSentFiles(prev => [...prev, { msgId, fileIds: attachedFilesCopy }]); } // Add user message to display const messagesBeforeSend = [...quickChatMessages]; setQuickChatMessages(prev => [...prev, userMessage]); setQuickChatInput(''); setQuickChatAttachedFiles([]); // Clear attached files after send setQuickChatLoading(true); // Store model at send time to avoid issues with model switching during streaming const modelAtSend = quickChatModel; const tempAtSend = quickChatTemp; const effortAtSend = quickChatEffort; const webSearchAtSend = quickChatWebSearch; const reasoningModels = ['gpt-5', 'gpt-5-chat-latest', 'gpt-5-mini', 'gpt-5-nano', 'gpt-5-pro', 'gpt-5.1', 'gpt-5.1-chat-latest', 'gpt-5.2', 'gpt-5.2-chat-latest', 'gpt-5.2-pro', 'o3']; // Snapshot council config at send time const isCouncilSend = quickChatCouncilMode && quickChatCouncilModels.length >= 2; const councilModelsAtSend = isCouncilSend ? [...quickChatCouncilModels] : []; const chairmanAtSend = isCouncilSend ? 
(quickChatChairmanModel || quickChatCouncilModels[0]) : null;
    try {
      // Build scopes for file search (Quick Chat uses a temp scope)
      const projectPath = currentBlueprintPath || 'untitled';
      const scopes = [`${projectPath}/quick_chat_temp`];
      let fullResponse = '';
      if (isCouncilSend && chairmanAtSend) {
        // ========== COUNCIL MODE ==========
        // Streams SSE events for the three council stages; stage 3 chunks are
        // progressively rendered as the assistant message.
        setQuickChatCouncilStage('Starting council...');
        setQuickChatCouncilData({ stage1: null, stage2: null, stage3: null });
        const response = await fetch(`/api/run_council_stream?user=${encodeURIComponent(user?.username || 'test')}`, {
          method: 'POST',
          headers: { 'Content-Type': 'application/json', ...getAuthHeader() },
          body: JSON.stringify({
            node_id: 'quick_chat_temp',
            incoming_contexts: [{ messages: messagesBeforeSend }],
            user_prompt: userInput || 'Please analyze the attached files.',
            council_models: councilModelsAtSend.map(cfg => ({
              model_name: cfg.model,
              temperature: cfg.temperature ?? null,
              reasoning_effort: cfg.reasoningEffort ?? null,
              enable_google_search: cfg.enableWebSearch ?? null,
            })),
            chairman_model: {
              model_name: chairmanAtSend.model,
              temperature: chairmanAtSend.temperature ?? null,
              reasoning_effort: chairmanAtSend.reasoningEffort ?? null,
              enable_google_search: chairmanAtSend.enableWebSearch ?? null,
            },
            system_prompt: selectedNode.data.systemPrompt || null,
            temperature: selectedNode.data.temperature,
            reasoning_effort: selectedNode.data.reasoningEffort || 'medium',
            enable_google_search: selectedNode.data.enableGoogleSearch !== false,
            merge_strategy: 'smart',
            attached_file_ids: attachedFilesCopy,
            scopes,
          }),
        });
        if (!response.ok) {
          const errText = await response.text();
          throw new Error(errText || `HTTP ${response.status}`);
        }
        if (!response.body) throw new Error('No response body');
        const reader = response.body.getReader();
        const decoder = new TextDecoder();
        let sseBuffer = '';
        let stage1Results: Array<{ model: string; response: string }> = [];
        let stage2Data: any = null;
        let stage3Full = '';
        let stage3Model = '';
        while (true) {
          const { value, done } = await reader.read();
          if (done) break;
          sseBuffer += decoder.decode(value, { stream: true });
          // SSE events are separated by blank lines; keep the trailing partial
          // event in the buffer until its terminator arrives.
          const parts = sseBuffer.split('\n\n');
          sseBuffer = parts.pop() || '';
          for (const part of parts) {
            const line = part.trim();
            if (!line.startsWith('data: ')) continue;
            let evt: any;
            try { evt = JSON.parse(line.slice(6)); } catch { continue; }
            switch (evt.type) {
              case 'stage1_start':
                setQuickChatCouncilStage('Stage 1: Collecting responses...');
                break;
              case 'stage1_model_complete':
                stage1Results = [...stage1Results, evt.data];
                setQuickChatCouncilStage(`Stage 1: ${stage1Results.length}/${councilModelsAtSend.length} models done`);
                setQuickChatCouncilData({ stage1: [...stage1Results], stage2: null, stage3: null });
                break;
              case 'stage1_complete':
                stage1Results = evt.data;
                setQuickChatCouncilData({ stage1: stage1Results, stage2: null, stage3: null });
                break;
              case 'stage2_start':
                setQuickChatCouncilStage('Stage 2: Peer ranking...');
                break;
              case 'stage2_complete':
                stage2Data = evt.data;
                setQuickChatCouncilData({ stage1: stage1Results, stage2: stage2Data, stage3: null });
                break;
              case 'stage3_start':
                setQuickChatCouncilStage('Stage 3: Chairman synthesizing...');
                break;
              case 'stage3_chunk':
                stage3Full += evt.data.chunk;
                // Stream chairman response into chat
                setQuickChatMessages(prev => {
                  const newMsgs = [...prev];
                  const lastMsg = newMsgs[newMsgs.length - 1];
                  if (lastMsg?.role === 'assistant') {
                    return [...newMsgs.slice(0, -1), { ...lastMsg, content: stage3Full }];
                  } else {
                    return [...newMsgs, { id: `qc_${Date.now()}_a`, role: 'assistant', content: stage3Full }];
                  }
                });
                break;
              case 'stage3_complete':
                stage3Model = evt.data.model;
                stage3Full = evt.data.response;
                break;
              case 'complete': {
                const finalData: CouncilData = {
                  stage1: stage1Results,
                  stage2: stage2Data,
                  stage3: { model: stage3Model, response: stage3Full },
                };
                setQuickChatCouncilData(finalData);
                setQuickChatCouncilStage('');
                // Ensure final message is set
                setQuickChatMessages(prev => {
                  const newMsgs = [...prev];
                  const lastMsg = newMsgs[newMsgs.length - 1];
                  if (lastMsg?.role === 'assistant') {
                    return [...newMsgs.slice(0, -1), { ...lastMsg, content: stage3Full }];
                  } else {
                    return [...newMsgs, { id: `qc_${Date.now()}_a`, role: 'assistant', content: stage3Full }];
                  }
                });
                break;
              }
              case 'error':
                setQuickChatCouncilStage('');
                throw new Error(evt.data?.message || 'Council error');
            }
          }
        }
        fullResponse = stage3Full;
      } else {
        // ========== SINGLE MODEL MODE ==========
        // Determine provider
        const isClaude = modelAtSend.includes('claude');
        const isOpenAI = modelAtSend.includes('gpt') || modelAtSend === 'o3';
        const provider = isClaude ? 'claude' : isOpenAI ? 'openai' : 'google';
        const isReasoning = reasoningModels.includes(modelAtSend);
        const response = await fetch(`/api/run_node_stream?user=${encodeURIComponent(user?.username || 'test')}`, {
          method: 'POST',
          headers: { 'Content-Type': 'application/json', ...getAuthHeader() },
          body: JSON.stringify({
            node_id: 'quick_chat_temp',
            incoming_contexts: [{ messages: messagesBeforeSend }],
            user_prompt: userInput || 'Please analyze the attached files.',
            attached_file_ids: attachedFilesCopy,
            scopes,
            merge_strategy: 'smart',
            config: {
              provider,
              model_name: modelAtSend,
              // Reasoning models are pinned to temperature 1
              temperature: isReasoning ?
1 : tempAtSend,
              enable_google_search: webSearchAtSend,
              reasoning_effort: effortAtSend,
            }
          })
        });
        if (!response.ok) {
          const errText = await response.text();
          throw new Error(errText || `HTTP ${response.status}`);
        }
        if (!response.body) throw new Error('No response body');
        const reader = response.body.getReader();
        const decoder = new TextDecoder();
        while (true) {
          const { value, done } = await reader.read();
          if (done) break;
          const chunk = decoder.decode(value);
          fullResponse += chunk;
          // Append to the trailing assistant message, or start one.
          setQuickChatMessages(prev => {
            const newMsgs = [...prev];
            const lastMsg = newMsgs[newMsgs.length - 1];
            if (lastMsg?.role === 'assistant') {
              return [...newMsgs.slice(0, -1), { ...lastMsg, content: fullResponse }];
            } else {
              return [...newMsgs, { id: `qc_${Date.now()}_a`, role: 'assistant', content: fullResponse }];
            }
          });
        }
      }
      // Determine whether to overwrite current node or create new one
      // Use quickChatLastNodeId as the "current" node in the chat flow to ensure continuity
      // If not set, fallback to quickChatTrace.sourceNodeId (initial state)
      const fromNodeId = quickChatLastNodeId || quickChatTrace.sourceNodeId;
      const fromNode = nodes.find(n => n.id === fromNodeId);
      const fromNodeHasResponse = fromNode?.data.response && fromNode.data.response.trim() !== '';
      if (!fromNodeHasResponse && fromNode) {
        // Overwrite the source node (it's empty)
        const nodeUpdate: any = {
          userPrompt: userInput,
          response: fullResponse,
          model: isCouncilSend ? chairmanAtSend!.model : modelAtSend,
          temperature: isCouncilSend ? selectedNode.data.temperature : (reasoningModels.includes(modelAtSend) ? 1 : tempAtSend),
          reasoningEffort: isCouncilSend ? (selectedNode.data.reasoningEffort || 'medium') : effortAtSend,
          enableGoogleSearch: isCouncilSend ? (selectedNode.data.enableGoogleSearch !== false) : webSearchAtSend,
          attachedFileIds: attachedFilesCopy,
          status: 'success',
          querySentAt: Date.now(),
          responseReceivedAt: Date.now(),
        };
        if (isCouncilSend) {
          nodeUpdate.councilMode = true;
          nodeUpdate.councilModels = councilModelsAtSend;
          nodeUpdate.chairmanModel = chairmanAtSend;
          nodeUpdate.councilData = quickChatCouncilData;
        }
        updateNodeData(fromNodeId, nodeUpdate);
        // Update trace to reflect current node now has content
        setQuickChatTrace(prev => prev ? { ...prev, messages: [...messagesBeforeSend, userMessage, { id: `qc_${Date.now()}_a`, role: 'assistant', content: fullResponse }] } : null);
        // Update last node ID
        setQuickChatLastNodeId(fromNodeId);
        // Generate title
        generateTitle(fromNodeId, userInput, fullResponse);
      } else {
        // Create new node (source node has response, continue the chain)
        const newNodeId = `node_${Date.now()}`;
        const sourceNode = fromNode || selectedNode;
        const newPos = { x: sourceNode.position.x + 300, y: sourceNode.position.y };
        const newNodeData: any = {
          label: isCouncilSend ? 'Council Chat' : 'Quick Chat',
          model: isCouncilSend ? chairmanAtSend!.model : modelAtSend,
          temperature: isCouncilSend ? selectedNode.data.temperature : (reasoningModels.includes(modelAtSend) ? 1 : tempAtSend),
          systemPrompt: '',
          userPrompt: userInput,
          mergeStrategy: 'smart' as const,
          reasoningEffort: isCouncilSend ? (selectedNode.data.reasoningEffort || 'medium') : effortAtSend,
          enableGoogleSearch: isCouncilSend ?
(selectedNode.data.enableGoogleSearch !== false) : webSearchAtSend, traces: [], outgoingTraces: [], forkedTraces: [], mergedTraces: [], activeTraceIds: [], attachedFileIds: attachedFilesCopy, response: fullResponse, status: 'success' as const, inputs: 1, querySentAt: Date.now(), responseReceivedAt: Date.now(), }; if (isCouncilSend) { newNodeData.councilMode = true; newNodeData.councilModels = councilModelsAtSend; newNodeData.chairmanModel = chairmanAtSend; newNodeData.councilData = quickChatCouncilData; } const newNode = { id: newNodeId, type: 'llmNode', position: newPos, data: { ...newNodeData, } }; addNode(newNode); // Connect to the source node setTimeout(() => { const store = useFlowStore.getState(); const currentEdges = store.edges; const sourceNodeData = store.nodes.find(n => n.id === fromNodeId); // Find the right trace handle to use let sourceHandle = 'new-trace'; // Get the base trace ID (e.g., 'trace-A' from 'trace-A_B_C' or 'new-trace-A' or 'merged-xxx') const currentTraceId = quickChatTrace?.id || ''; const isNewTrace = currentTraceId.startsWith('new-trace-'); const isMergedTrace = currentTraceId.startsWith('merged-'); if (isMergedTrace) { // For merged trace: find the merged trace handle on the source node // The trace ID may have evolved (e.g., 'merged-xxx' -> 'merged-xxx_nodeA' -> 'merged-xxx_nodeA_nodeB') // We need to find the version that ends with the current source node ID // First try: exact match with evolved ID (merged-xxx_sourceNodeId) const evolvedMergedId = `${currentTraceId}_${fromNodeId}`; let mergedOutgoing = sourceNodeData?.data.outgoingTraces?.find( t => t.id === evolvedMergedId ); // Second try: find trace that starts with merged ID and ends with this node if (!mergedOutgoing) { mergedOutgoing = sourceNodeData?.data.outgoingTraces?.find( t => t.id.startsWith(currentTraceId) && t.id.endsWith(`_${fromNodeId}`) ); } // Third try: find any trace that contains the merged ID if (!mergedOutgoing) { mergedOutgoing = 
sourceNodeData?.data.outgoingTraces?.find( t => t.id.startsWith(currentTraceId) || t.id === currentTraceId ); } // Fourth try: find any merged trace if (!mergedOutgoing) { mergedOutgoing = sourceNodeData?.data.outgoingTraces?.find( t => t.id.startsWith('merged-') ); } if (mergedOutgoing) { sourceHandle = `trace-${mergedOutgoing.id}`; } else { // Last resort: use the merged trace ID directly sourceHandle = `trace-${currentTraceId}`; } } else if (isNewTrace) { // For "Start New Trace": create a fresh independent trace from the original node // First, check if this is the original starting node or a continuation node const originalStartNodeId = currentTraceId.replace('new-trace-', ''); const isOriginalNode = fromNodeId === originalStartNodeId; if (isOriginalNode) { // This is the first round - starting from original node const hasOutgoingEdges = currentEdges.some(e => e.source === fromNodeId); if (hasOutgoingEdges) { // Original node already has downstream - create a new fork sourceHandle = 'new-trace'; } else { // No downstream yet - use self trace const selfTrace = sourceNodeData?.data.outgoingTraces?.find( t => t.id === `trace-${fromNodeId}` ); if (selfTrace) { sourceHandle = `trace-${selfTrace.id}`; } } } else { // This is a continuation - find the trace ID (should be preserved now) // Look for a trace that was created from the original node's self trace const matchingTrace = sourceNodeData?.data.outgoingTraces?.find(t => { return t.id.includes(originalStartNodeId); }); if (matchingTrace) { sourceHandle = `trace-${matchingTrace.id}`; } else { // Fallback 1: Check INCOMING traces (Connect to Continue Handle) const incoming = sourceNodeData?.data.traces?.find(t => t.id.includes(originalStartNodeId) ); if (incoming) { // ID is preserved, so handle ID is just trace-{id} sourceHandle = `trace-${incoming.id}`; } else { // Fallback 2: find any trace that ends with fromNodeId (unlikely if ID preserved) const anyMatch = sourceNodeData?.data.outgoingTraces?.find( t => t.id 
=== `trace-${fromNodeId}` ); if (anyMatch) { sourceHandle = `trace-${anyMatch.id}`; } } } } } else { // For existing trace: ID is preserved const baseTraceId = currentTraceId.replace(/^trace-/, ''); // 1. Try OUTGOING traces first (if already connected downstream) const matchingOutgoing = sourceNodeData?.data.outgoingTraces?.find(t => { const traceBase = t.id.replace(/^trace-/, ''); return traceBase === baseTraceId; // Exact match now }); if (matchingOutgoing) { sourceHandle = `trace-${matchingOutgoing.id}`; } else { // 2. Try INCOMING traces (Connect to Continue Handle) const matchingIncoming = sourceNodeData?.data.traces?.find(t => { const tId = t.id.replace(/^trace-/, ''); return tId === baseTraceId; // Exact match now }); if (matchingIncoming) { // ID is preserved sourceHandle = `trace-${matchingIncoming.id}`; } } } // If this is the first message and we need to duplicate (has downstream), // onConnect will automatically handle the trace duplication // because the sourceHandle already has an outgoing edge store.onConnect({ source: fromNodeId, sourceHandle, target: newNodeId, targetHandle: 'input-0' }); // After first duplication, subsequent messages continue on the new trace // Reset the duplicate flag since we're now on the new branch setQuickChatNeedsDuplicate(false); // Update trace for continued chat - use newNodeId as the new source // Find the actual trace ID on the new node to ensure continuity const newNode = store.nodes.find(n => n.id === newNodeId); const currentId = quickChatTrace?.id || ''; const isMerged = currentId.startsWith('merged-'); const isCurrentNewTrace = currentId.startsWith('new-trace-'); let nextTraceId = currentId; if (newNode && newNode.data.outgoingTraces) { // Find the trace that continues the current conversation // Now trace IDs don't evolve, so it should be simpler if (isMerged) { // Merged traces might still need evolution or logic check // For now assuming linear extension keeps same ID if we changed flowStore // But merged 
trace logic in flowStore might still append ID? // Let's check if evolved version exists const evolved = newNode.data.outgoingTraces.find(t => t.id === `${currentId}_${newNodeId}` ); if (evolved) nextTraceId = evolved.id; else nextTraceId = currentId; // Try keeping same ID } else if (isCurrentNewTrace) { // For new trace, check if we have an outgoing trace with the start node ID const startNodeId = currentId.replace('new-trace-', ''); const match = newNode.data.outgoingTraces.find(t => t.id.includes(startNodeId) ); if (match) nextTraceId = match.id; } else { // Regular trace: ID should be preserved nextTraceId = currentId; } } setQuickChatTrace(prev => prev ? { ...prev, id: nextTraceId, sourceNodeId: newNodeId, messages: [...messagesBeforeSend, userMessage, { id: `qc_${Date.now()}_a`, role: 'assistant', content: fullResponse }] } : null); // Update last node ID to the new node setQuickChatLastNodeId(newNodeId); // Generate title generateTitle(newNodeId, userInput, fullResponse); }, 100); } } catch (error) { console.error('Quick chat error:', error); setQuickChatMessages(prev => [...prev, { id: `qc_err_${Date.now()}`, role: 'assistant', content: `Error: ${error}` }]); setQuickChatCouncilStage(''); } finally { setQuickChatLoading(false); setQuickChatCouncilStage(''); // Refocus the input after sending setTimeout(() => { quickChatInputRef.current?.focus(); }, 50); } }; return (
{/* Header */}
handleChange('label', e.target.value)} className={`font-bold text-lg bg-transparent border-none focus:ring-0 focus:outline-none w-full ${ isDark ? 'text-gray-200' : 'text-gray-900' }`} />
{selectedNode.data.status}
ID: {selectedNode.id}
{/* Tabs */}
{/* Content */}
{activeTab === 'interact' && (
{selectedNode.data.debateMode ? ( /* Debate mode: multi-model selector + judge config */
{(() => { const debateModelsList = [ { value: 'claude-sonnet-4-5', label: 'claude-sonnet-4.5' }, { value: 'claude-opus-4', label: 'claude-opus-4' }, { value: 'claude-opus-4-5', label: 'claude-opus-4.5' }, { value: 'claude-opus-4-6', label: 'claude-opus-4.6' }, { value: 'gemini-2.5-flash', label: 'gemini-2.5-flash' }, { value: 'gemini-2.5-flash-lite', label: 'gemini-2.5-flash-lite' }, { value: 'gemini-3-pro-preview', label: 'gemini-3-pro-preview' }, { value: 'gpt-4.1', label: 'gpt-4.1' }, { value: 'gpt-4o', label: 'gpt-4o' }, { value: 'gpt-5', label: 'gpt-5' }, { value: 'gpt-5-mini', label: 'gpt-5-mini' }, { value: 'gpt-5-nano', label: 'gpt-5-nano' }, { value: 'gpt-5.1', label: 'gpt-5.1' }, { value: 'gpt-5.2', label: 'gpt-5.2' }, { value: 'o3', label: 'o3', premium: true }, ]; const members: CouncilMemberConfig[] = selectedNode.data.debateModels || []; const isReasoningModel = (v: string) => ['gpt-5', 'gpt-5-mini', 'gpt-5-nano', 'gpt-5-pro', 'gpt-5.1', 'gpt-5.2', 'gpt-5.2-pro', 'o3'].includes(v); const updateDebateMember = (modelName: string, field: string, value: any) => { const updated = [...members]; const idx = updated.findIndex(c => c.model === modelName); if (idx >= 0) { updated[idx] = { ...updated[idx], [field]: value }; handleChange('debateModels', updated); } }; return debateModelsList.map(m => { const selected = members.some(c => c.model === m.value); const disabled = (m as any).premium && !canUsePremiumModels; const cfg = members.find(c => c.model === m.value); return (
{selected && cfg && (
{isReasoningModel(m.value) && ( )}
)}
); }); })()}
{/* Judge Mode */}
{/* Judge Model (only for external_judge) */} {(selectedNode.data.debateJudgeMode || 'external_judge') === 'external_judge' && (
)} {/* Debate Format */}
{/* Custom Prompt (only for custom format) */} {selectedNode.data.debateFormat === 'custom' && (