From f97b7a1bfa220a0947f2cd63c23f4faa9fcd42e7 Mon Sep 17 00:00:00 2001 From: blackhao <13851610112@163.com> Date: Mon, 8 Dec 2025 15:07:12 -0600 Subject: merge logic --- frontend/src/App.tsx | 183 ++- frontend/src/components/ContextMenu.tsx | 22 +- frontend/src/components/LeftSidebar.tsx | 71 +- frontend/src/components/Sidebar.tsx | 1576 ++++++++++++++++++++++++-- frontend/src/components/edges/MergedEdge.tsx | 77 ++ frontend/src/components/nodes/LLMNode.tsx | 259 ++++- frontend/src/index.css | 182 ++- frontend/src/store/flowStore.ts | 1357 ++++++++++++++++++++-- 8 files changed, 3460 insertions(+), 267 deletions(-) create mode 100644 frontend/src/components/edges/MergedEdge.tsx diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx index 9ec1340..5776091 100644 --- a/frontend/src/App.tsx +++ b/frontend/src/App.tsx @@ -6,21 +6,27 @@ import ReactFlow, { ReactFlowProvider, Panel, useReactFlow, + SelectionMode, type Node, type Edge } from 'reactflow'; import 'reactflow/dist/style.css'; import useFlowStore from './store/flowStore'; import LLMNode from './components/nodes/LLMNode'; +import MergedEdge from './components/edges/MergedEdge'; import Sidebar from './components/Sidebar'; import LeftSidebar from './components/LeftSidebar'; import { ContextMenu } from './components/ContextMenu'; -import { Plus } from 'lucide-react'; +import { Plus, Sun, Moon, LayoutGrid } from 'lucide-react'; const nodeTypes = { llmNode: LLMNode, }; +const edgeTypes = { + merged: MergedEdge, +}; + function Flow() { const { nodes, @@ -32,24 +38,45 @@ function Flow() { deleteEdge, deleteNode, deleteBranch, + deleteTrace, setSelectedNode, toggleNodeDisabled, archiveNode, createNodeFromArchive, - toggleTraceDisabled + toggleTraceDisabled, + theme, + toggleTheme, + autoLayout, + findNonOverlappingPosition } = useFlowStore(); const reactFlowWrapper = useRef(null); - const { project } = useReactFlow(); - const [menu, setMenu] = useState<{ x: number; y: number; type: 'pane' | 'node' | 'edge'; id?: string } | null>(null); + const { project, getViewport } = useReactFlow(); + const [menu, setMenu] = useState<{ x: number; y: number; type: 'pane' | 'node' | 'edge' | 'multiselect'; id?: string; selectedIds?: string[] } | null>(null); const [isLeftOpen, setIsLeftOpen] = useState(true); const [isRightOpen, setIsRightOpen] = useState(true); - const onPaneClick = () => { + // Get selected nodes + const selectedNodes = nodes.filter(n => n.selected); + + const onPaneClick = useCallback(() => { setSelectedNode(null); setMenu(null); - }; + }, [setSelectedNode]); + + // Close menu on various interactions + const closeMenu = useCallback(() => { + setMenu(null); + }, []); + + const handleNodeDragStart = useCallback(() => { + setMenu(null); + }, []); + + const handleMoveStart = useCallback(() => { + setMenu(null); + }, []); const handlePaneContextMenu = (event: React.MouseEvent) => { event.preventDefault(); @@ -58,7 +85,33 @@ function Flow() { const handleNodeContextMenu = (event: React.MouseEvent, node: Node) => { event.preventDefault(); - setMenu({ x: event.clientX, y: event.clientY, type: 'node', id: node.id }); + // Check if multiple nodes are selected and the right-clicked node is one of them + if (selectedNodes.length > 1 && selectedNodes.some(n => n.id === node.id)) { + setMenu({ + x: event.clientX, + y: event.clientY, + type: 'multiselect', + selectedIds: selectedNodes.map(n => n.id) + }); + } else { + setMenu({ x: event.clientX, y: event.clientY, type: 'node', id: node.id }); + } + }; + + // Batch operations for multi-select + const 
handleBatchDelete = (nodeIds: string[]) => { + nodeIds.forEach(id => deleteNode(id)); + setMenu(null); + }; + + const handleBatchDisable = (nodeIds: string[]) => { + nodeIds.forEach(id => toggleNodeDisabled(id)); + setMenu(null); + }; + + const handleBatchArchive = (nodeIds: string[]) => { + nodeIds.forEach(id => archiveNode(id)); + setMenu(null); }; const handleEdgeContextMenu = (event: React.MouseEvent, edge: Edge) => { @@ -68,7 +121,20 @@ function Flow() { const handleAddNode = (position?: { x: number, y: number }) => { const id = `node_${Date.now()}`; - const pos = position || { x: Math.random() * 400, y: Math.random() * 400 }; + + // If no position provided, use viewport center + let basePos = position; + if (!basePos && reactFlowWrapper.current) { + const { x, y, zoom } = getViewport(); + const rect = reactFlowWrapper.current.getBoundingClientRect(); + // Calculate center of viewport in flow coordinates + basePos = { + x: (-x + rect.width / 2) / zoom - 100, // offset by half node width + y: (-y + rect.height / 2) / zoom - 40 // offset by half node height + }; + } + basePos = basePos || { x: 200, y: 200 }; + const pos = findNonOverlappingPosition(basePos.x, basePos.y); addNode({ id, @@ -76,7 +142,7 @@ function Flow() { position: pos, data: { label: 'New Question', - model: 'gpt-4o', + model: 'gpt-5.1', temperature: 0.7, systemPrompt: '', userPrompt: '', @@ -86,6 +152,7 @@ function Flow() { traces: [], outgoingTraces: [], forkedTraces: [], + mergedTraces: [], response: '', status: 'idle', inputs: 1 @@ -124,11 +191,11 @@ function Flow() { }; return ( -
+
       <LeftSidebar isOpen={isLeftOpen} onToggle={() => setIsLeftOpen(!isLeftOpen)} />
       
{ closeMenu(); onNodeClick(e, node); }} onPaneClick={onPaneClick} onPaneContextMenu={handlePaneContextMenu} onNodeContextMenu={handleNodeContextMenu} onEdgeContextMenu={handleEdgeContextMenu} + onNodeDragStart={handleNodeDragStart} + onMoveStart={handleMoveStart} + onSelectionStart={closeMenu} fitView + panOnDrag + selectionOnDrag + selectionKeyCode="Shift" + multiSelectionKeyCode="Shift" + selectionMode={SelectionMode.Partial} > - - - + + + - +
+ + + +
@@ -180,7 +288,27 @@ function Flow() { } } } - ] : menu.type === 'node' ? (() => { + ] : menu.type === 'multiselect' ? (() => { + // Multi-select menu - batch operations + const ids = menu.selectedIds || []; + const allDisabled = ids.every(id => nodes.find(n => n.id === id)?.data?.disabled); + + return [ + { + label: allDisabled ? `Enable ${ids.length} Nodes` : `Disable ${ids.length} Nodes`, + onClick: () => handleBatchDisable(ids) + }, + { + label: `Archive ${ids.length} Nodes`, + onClick: () => handleBatchArchive(ids) + }, + { + label: `Delete ${ids.length} Nodes`, + danger: true, + onClick: () => handleBatchDelete(ids) + } + ]; + })() : menu.type === 'node' ? (() => { const targetNode = nodes.find(n => n.id === menu.id); const isDisabled = targetNode?.data?.disabled; @@ -210,7 +338,7 @@ function Flow() { onClick: () => menu.id && deleteBranch(menu.id) } ]; - })() : (() => { + })() : menu.type === 'edge' ? (() => { // Check if any node connected to this edge is disabled const targetEdge = edges.find(e => e.id === menu.id); const sourceNode = nodes.find(n => n.id === targetEdge?.source); @@ -230,14 +358,19 @@ function Flow() { label: 'Delete Branch', danger: true, onClick: () => menu.id && deleteBranch(undefined, menu.id) + }, + { + label: 'Delete Trace', + danger: true, + onClick: () => menu.id && deleteTrace(menu.id) } ]; - })() + })() : [] } /> )}
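Note on the handleAddNode change above: when no position is supplied, the new node is placed at the visual centre of the canvas by inverting React Flow's viewport transform (a flow point renders at flow * zoom + viewport offset), nudged by half a node so the node itself lands centred, and then passed through findNonOverlappingPosition so repeated adds fan out instead of stacking. A minimal sketch of that conversion, assuming the usual { x, y, zoom } viewport shape and the roughly 200x80 node footprint implied by the -100/-40 offsets; the helper name is hypothetical:

// Sketch only: convert the wrapper's screen centre into flow coordinates.
type Viewport = { x: number; y: number; zoom: number };

function viewportCenterToFlowPosition(
  viewport: Viewport,
  rect: { width: number; height: number },
  nodeSize = { width: 200, height: 80 } // assumed node footprint
): { x: number; y: number } {
  // Invert screen = flow * zoom + offset at the wrapper's centre,
  // then back off by half the node size.
  return {
    x: (-viewport.x + rect.width / 2) / viewport.zoom - nodeSize.width / 2,
    y: (-viewport.y + rect.height / 2) / viewport.zoom - nodeSize.height / 2,
  };
}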
-      <Sidebar isOpen={isRightOpen} onToggle={() => setIsRightOpen(!isRightOpen)} />
+      <Sidebar isOpen={isRightOpen} onToggle={() => setIsRightOpen(!isRightOpen)} onInteract={closeMenu} />
     
  );
}
diff --git a/frontend/src/components/ContextMenu.tsx b/frontend/src/components/ContextMenu.tsx
index 459641b..8104f8c 100644
--- a/frontend/src/components/ContextMenu.tsx
+++ b/frontend/src/components/ContextMenu.tsx
@@ -1,4 +1,5 @@
 import React from 'react';
+import useFlowStore from '../store/flowStore';
 
 interface ContextMenuProps {
   x: number;
@@ -8,16 +9,31 @@ interface ContextMenuProps {
 }
 
 export const ContextMenu: React.FC<ContextMenuProps> = ({ x, y, items, onClose }) => {
+  const { theme } = useFlowStore();
+  const isDark = theme === 'dark';
+
   return (
    
e.stopPropagation()} // Prevent click through + onClick={(e) => e.stopPropagation()} > {items.map((item, idx) => ( {/* Icons when collapsed */}
- - - + + +
); } return ( -
+
{/* Header */} -
-

Workspace

+
+

Workspace

{/* Tabs */} -
+
{/* Content Area */} -
+
{activeTab === 'project' && (
@@ -97,28 +116,34 @@ const LeftSidebar: React.FC = ({ isOpen, onToggle }) => {
) : ( <> -

Drag to canvas to create a copy

+

Drag to canvas to create a copy

{archivedNodes.map((archived) => (
handleDragStart(e, archived.id)} - className="p-2 bg-gray-50 border border-gray-200 rounded-md cursor-grab hover:bg-gray-100 hover:border-gray-300 transition-colors group" + className={`p-2 border rounded-md cursor-grab transition-colors group ${ + isDark + ? 'bg-gray-700 border-gray-600 hover:bg-gray-600 hover:border-gray-500' + : 'bg-gray-50 border-gray-200 hover:bg-gray-100 hover:border-gray-300' + }`} >
- - {archived.label} + + {archived.label}
-
{archived.model}
+
{archived.model}
))} diff --git a/frontend/src/components/Sidebar.tsx b/frontend/src/components/Sidebar.tsx index 165028c..28a40f6 100644 --- a/frontend/src/components/Sidebar.tsx +++ b/frontend/src/components/Sidebar.tsx @@ -1,18 +1,25 @@ -import React, { useState, useEffect } from 'react'; +import React, { useState, useEffect, useRef } from 'react'; import useFlowStore from '../store/flowStore'; -import type { NodeData } from '../store/flowStore'; +import type { NodeData, Trace, Message, MergedTrace, MergeStrategy } from '../store/flowStore'; import ReactMarkdown from 'react-markdown'; -import { Play, Settings, Info, Save, ChevronLeft, ChevronRight, Maximize2, Edit3, X, Check, FileText } from 'lucide-react'; +import { Play, Settings, Info, Save, ChevronLeft, ChevronRight, Maximize2, Edit3, X, Check, FileText, MessageCircle, Send, GripVertical, GitMerge, Trash2, AlertCircle, Loader2 } from 'lucide-react'; interface SidebarProps { isOpen: boolean; onToggle: () => void; + onInteract?: () => void; } -const Sidebar: React.FC = ({ isOpen, onToggle }) => { - const { nodes, selectedNodeId, updateNodeData, getActiveContext } = useFlowStore(); +const Sidebar: React.FC = ({ isOpen, onToggle, onInteract }) => { + const { + nodes, edges, selectedNodeId, updateNodeData, getActiveContext, addNode, setSelectedNode, + isTraceComplete, createQuickChatNode, theme, + createMergedTrace, updateMergedTrace, deleteMergedTrace, computeMergedMessages + } = useFlowStore(); + const isDark = theme === 'dark'; const [activeTab, setActiveTab] = useState<'interact' | 'settings' | 'debug'>('interact'); const [streamBuffer, setStreamBuffer] = useState(''); + const [streamingNodeId, setStreamingNodeId] = useState(null); // Track which node is streaming // Response Modal & Edit states const [isModalOpen, setIsModalOpen] = useState(false); @@ -23,6 +30,29 @@ const Sidebar: React.FC = ({ isOpen, onToggle }) => { const [showSummaryModal, setShowSummaryModal] = useState(false); const [summaryModel, setSummaryModel] = useState('gpt-5-nano'); const [isSummarizing, setIsSummarizing] = useState(false); + + // Quick Chat states + const [quickChatOpen, setQuickChatOpen] = useState(false); + const [quickChatTrace, setQuickChatTrace] = useState(null); + const [quickChatMessages, setQuickChatMessages] = useState([]); + const [quickChatInput, setQuickChatInput] = useState(''); + const [quickChatModel, setQuickChatModel] = useState('gpt-5.1'); + const [quickChatLoading, setQuickChatLoading] = useState(false); + const [quickChatTemp, setQuickChatTemp] = useState(0.7); + const [quickChatEffort, setQuickChatEffort] = useState<'low' | 'medium' | 'high'>('medium'); + const [quickChatNeedsDuplicate, setQuickChatNeedsDuplicate] = useState(false); + const [quickChatWebSearch, setQuickChatWebSearch] = useState(true); + const quickChatEndRef = useRef(null); + const quickChatInputRef = useRef(null); + + // Merge Trace states + const [showMergeModal, setShowMergeModal] = useState(false); + const [mergeSelectedIds, setMergeSelectedIds] = useState([]); + const [mergeStrategy, setMergeStrategy] = useState('query_time'); + const [mergeDraggedId, setMergeDraggedId] = useState(null); + const [mergeOrder, setMergeOrder] = useState([]); + const [showMergePreview, setShowMergePreview] = useState(false); + const [isSummarizingMerge, setIsSummarizingMerge] = useState(false); const selectedNode = nodes.find((n) => n.id === selectedNodeId); @@ -31,8 +61,23 @@ const Sidebar: React.FC = ({ isOpen, onToggle }) => { setStreamBuffer(''); setIsModalOpen(false); setIsEditing(false); + 
setShowMergeModal(false); + setMergeSelectedIds([]); + setShowMergePreview(false); }, [selectedNodeId]); + // Default select first trace when node changes and no trace is selected + useEffect(() => { + if (selectedNode && + selectedNode.data.traces && + selectedNode.data.traces.length > 0 && + (!selectedNode.data.activeTraceIds || selectedNode.data.activeTraceIds.length === 0)) { + updateNodeData(selectedNode.id, { + activeTraceIds: [selectedNode.data.traces[0].id] + }); + } + }, [selectedNodeId, selectedNode?.data.traces?.length]); + // Sync editedResponse when entering edit mode useEffect(() => { if (isEditing && selectedNode) { @@ -40,18 +85,27 @@ const Sidebar: React.FC = ({ isOpen, onToggle }) => { } }, [isEditing, selectedNode?.data.response]); + // Scroll to bottom when quick chat messages change + useEffect(() => { + if (quickChatEndRef.current) { + quickChatEndRef.current.scrollIntoView({ behavior: 'smooth' }); + } + }, [quickChatMessages]); + if (!isOpen) { return ( -
+
{selectedNode && ( -
+
{selectedNode.data.label}
)} @@ -61,15 +115,21 @@ const Sidebar: React.FC = ({ isOpen, onToggle }) => { if (!selectedNode) { return ( -
-
- Details -
-
-

Select a node to edit

+
+

Select a node to edit

); @@ -78,20 +138,34 @@ const Sidebar: React.FC = ({ isOpen, onToggle }) => { const handleRun = async () => { if (!selectedNode) return; - updateNodeData(selectedNode.id, { status: 'loading', response: '' }); + // Check if upstream is complete before running + const tracesCheck = checkActiveTracesComplete(); + if (!tracesCheck.complete) { + console.warn('Cannot run: upstream context is incomplete'); + return; + } + + // Capture the node ID at the start of the request + const runningNodeId = selectedNode.id; + const runningPrompt = selectedNode.data.userPrompt; + + // Record query sent timestamp + const querySentAt = Date.now(); + updateNodeData(runningNodeId, { status: 'loading', response: '', querySentAt }); setStreamBuffer(''); + setStreamingNodeId(runningNodeId); // Use getActiveContext which respects the user's selected traces - const context = getActiveContext(selectedNode.id); + const context = getActiveContext(runningNodeId); try { const response = await fetch('http://localhost:8000/api/run_node_stream', { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ - node_id: selectedNode.id, - incoming_contexts: [{ messages: context }], // Simple list wrap for now - user_prompt: selectedNode.data.userPrompt, + node_id: runningNodeId, + incoming_contexts: [{ messages: context }], + user_prompt: runningPrompt, merge_strategy: selectedNode.data.mergeStrategy || 'smart', config: { provider: selectedNode.data.model.includes('gpt') || selectedNode.data.model === 'o3' ? 'openai' : 'google', @@ -99,8 +173,8 @@ const Sidebar: React.FC = ({ isOpen, onToggle }) => { temperature: selectedNode.data.temperature, system_prompt: selectedNode.data.systemPrompt, api_key: selectedNode.data.apiKey, - enable_google_search: selectedNode.data.enableGoogleSearch !== false, // Default true - reasoning_effort: selectedNode.data.reasoningEffort || 'medium', // For reasoning models + enable_google_search: selectedNode.data.enableGoogleSearch !== false, + reasoning_effort: selectedNode.data.reasoningEffort || 'medium', } }) }); @@ -115,17 +189,15 @@ const Sidebar: React.FC = ({ isOpen, onToggle }) => { if (done) break; const chunk = decoder.decode(value); fullResponse += chunk; + // Only update stream buffer, the display logic will check streamingNodeId setStreamBuffer(prev => prev + chunk); - // We update the store less frequently or at the end to avoid too many re-renders - // But for "live" feel we might want to update local state `streamBuffer` and sync to store at end } - // Update final state - // Append the new interaction to the node's output messages + // Update final state using captured nodeId const newUserMsg = { id: `msg_${Date.now()}_u`, role: 'user', - content: selectedNode.data.userPrompt + content: runningPrompt }; const newAssistantMsg = { id: `msg_${Date.now()}_a`, @@ -133,19 +205,23 @@ const Sidebar: React.FC = ({ isOpen, onToggle }) => { content: fullResponse }; - updateNodeData(selectedNode.id, { + const responseReceivedAt = Date.now(); + + updateNodeData(runningNodeId, { status: 'success', response: fullResponse, + responseReceivedAt, messages: [...context, newUserMsg, newAssistantMsg] as any }); - // Auto-generate title using gpt-5-nano (async, non-blocking) - // Always regenerate title after each query - generateTitle(selectedNode.id, selectedNode.data.userPrompt, fullResponse); + // Auto-generate title + generateTitle(runningNodeId, runningPrompt, fullResponse); } catch (error) { console.error(error); - updateNodeData(selectedNode.id, { status: 'error' }); 
+ updateNodeData(runningNodeId, { status: 'error' }); + } finally { + setStreamingNodeId(prev => prev === runningNodeId ? null : prev); } }; @@ -216,33 +292,730 @@ const Sidebar: React.FC = ({ isOpen, onToggle }) => { } }; + // Open merge modal + const openMergeModal = () => { + if (!selectedNode?.data.traces) return; + const traceIds = selectedNode.data.traces.map((t: Trace) => t.id); + setMergeOrder(traceIds); + setMergeSelectedIds([]); + setShowMergePreview(false); + setShowMergeModal(true); + }; + + // Drag-and-drop handlers for merge modal + const handleMergeDragStart = (e: React.DragEvent, traceId: string) => { + setMergeDraggedId(traceId); + e.dataTransfer.effectAllowed = 'move'; + }; + + const handleMergeDragOver = (e: React.DragEvent, overTraceId: string) => { + e.preventDefault(); + if (!mergeDraggedId || mergeDraggedId === overTraceId) return; + + const newOrder = [...mergeOrder]; + const draggedIndex = newOrder.indexOf(mergeDraggedId); + const overIndex = newOrder.indexOf(overTraceId); + + if (draggedIndex !== -1 && overIndex !== -1) { + newOrder.splice(draggedIndex, 1); + newOrder.splice(overIndex, 0, mergeDraggedId); + setMergeOrder(newOrder); + } + }; + + const handleMergeDragEnd = () => { + setMergeDraggedId(null); + }; + + // Toggle trace selection in merge modal + const toggleMergeSelection = (traceId: string) => { + setMergeSelectedIds(prev => { + if (prev.includes(traceId)) { + return prev.filter(id => id !== traceId); + } else { + return [...prev, traceId]; + } + }); + }; + + // Create merged trace + const handleCreateMergedTrace = async () => { + if (!selectedNode || mergeSelectedIds.length < 2) return; + + // Get the ordered trace IDs based on mergeOrder + const orderedSelectedIds = mergeOrder.filter(id => mergeSelectedIds.includes(id)); + + if (mergeStrategy === 'summary') { + setIsSummarizingMerge(true); + try { + const messages = computeMergedMessages(selectedNode.id, orderedSelectedIds, 'trace_order'); + const content = messages.map(m => `${m.role}: ${m.content}`).join('\n\n'); + + const res = await fetch('http://localhost:8000/api/summarize', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + content, + model_name: 'gpt-5-nano', + api_key: selectedNode.data.apiKey + }) + }); + + if (res.ok) { + const data = await res.json(); + const mergedId = createMergedTrace(selectedNode.id, orderedSelectedIds, 'summary'); + if (mergedId && data.summary) { + updateMergedTrace(selectedNode.id, mergedId, { summarizedContent: data.summary }); + } + } + } catch (error) { + console.error('Failed to summarize for merge:', error); + } finally { + setIsSummarizingMerge(false); + } + } else { + createMergedTrace(selectedNode.id, orderedSelectedIds, mergeStrategy); + } + + // Close modal and reset + setShowMergeModal(false); + setMergeSelectedIds([]); + setShowMergePreview(false); + }; + + // Get preview of merged messages + const getMergePreview = () => { + if (!selectedNode || mergeSelectedIds.length < 2) return []; + const orderedSelectedIds = mergeOrder.filter(id => mergeSelectedIds.includes(id)); + return computeMergedMessages(selectedNode.id, orderedSelectedIds, mergeStrategy); + }; + + // Check if a trace has downstream nodes from the current selected node + const traceHasDownstream = (trace: Trace): boolean => { + if (!selectedNode) return false; + + // Find edges going out from selectedNode that are part of this trace + const outgoingEdge = edges.find(e => + e.source === selectedNode.id && + e.sourceHandle?.startsWith('trace-') + 
); + + return !!outgoingEdge; + }; + + // Quick Chat functions + const openQuickChat = (trace: Trace | null, isNewTrace: boolean = false) => { + if (!selectedNode) return; + onInteract?.(); // Close context menu when opening quick chat + + // Check if current node has a "sent" query (has response) or just unsent draft + const hasResponse = !!selectedNode.data.response; + const hasDraftPrompt = !!selectedNode.data.userPrompt && !hasResponse; + + if (isNewTrace || !trace) { + // Start a new trace from current node + const initialMessages: Message[] = []; + // Only include user prompt as message if it was actually sent (has response) + if (selectedNode.data.userPrompt && hasResponse) { + initialMessages.push({ id: `${selectedNode.id}-u`, role: 'user', content: selectedNode.data.userPrompt }); + } + if (selectedNode.data.response) { + initialMessages.push({ id: `${selectedNode.id}-a`, role: 'assistant', content: selectedNode.data.response }); + } + + setQuickChatTrace({ + id: `new-trace-${selectedNode.id}`, + sourceNodeId: selectedNode.id, + color: '#888', + messages: initialMessages + }); + setQuickChatMessages(initialMessages); + setQuickChatNeedsDuplicate(false); + } else { + // Use existing trace context + const hasDownstream = traceHasDownstream(trace); + setQuickChatNeedsDuplicate(hasDownstream); + + // Build full message history + const fullMessages: Message[] = [...trace.messages]; + // Only include current node's content if it was sent + if (selectedNode.data.userPrompt && hasResponse) { + fullMessages.push({ id: `${selectedNode.id}-u`, role: 'user', content: selectedNode.data.userPrompt }); + } + if (selectedNode.data.response) { + fullMessages.push({ id: `${selectedNode.id}-a`, role: 'assistant', content: selectedNode.data.response }); + } + + setQuickChatTrace({ + ...trace, + sourceNodeId: selectedNode.id, + messages: fullMessages + }); + setQuickChatMessages(fullMessages); + } + + setQuickChatOpen(true); + // If there's an unsent draft, put it in the input box + setQuickChatInput(hasDraftPrompt ? selectedNode.data.userPrompt : ''); + }; + + const closeQuickChat = () => { + setQuickChatOpen(false); + setQuickChatTrace(null); + setQuickChatMessages([]); + }; + + // Open Quick Chat for a merged trace + const openMergedQuickChat = (merged: MergedTrace) => { + if (!selectedNode) return; + onInteract?.(); + + // Check if current node has a "sent" query (has response) or just unsent draft + const hasResponse = !!selectedNode.data.response; + const hasDraftPrompt = !!selectedNode.data.userPrompt && !hasResponse; + + // Build messages from merged trace + const fullMessages: Message[] = [...merged.messages]; + // Only include current node's content if it was sent + if (selectedNode.data.userPrompt && hasResponse) { + fullMessages.push({ id: `${selectedNode.id}-u`, role: 'user', content: selectedNode.data.userPrompt }); + } + if (selectedNode.data.response) { + fullMessages.push({ id: `${selectedNode.id}-a`, role: 'assistant', content: selectedNode.data.response }); + } + + // Create a pseudo-trace for the merged context + setQuickChatTrace({ + id: merged.id, + sourceNodeId: selectedNode.id, + color: merged.colors[0] || '#888', + messages: fullMessages + }); + setQuickChatMessages(fullMessages); + setQuickChatNeedsDuplicate(false); // Merged traces don't duplicate + + setQuickChatOpen(true); + // If there's an unsent draft, put it in the input box + setQuickChatInput(hasDraftPrompt ? 
selectedNode.data.userPrompt : ''); + }; + + // Check if a trace is complete (all upstream nodes have Q&A) + const canQuickChat = (trace: Trace): boolean => { + return isTraceComplete(trace); + }; + + // Helper: Check if all upstream nodes have complete Q&A by traversing edges + const checkUpstreamNodesComplete = (nodeId: string, visited: Set = new Set()): boolean => { + if (visited.has(nodeId)) return true; // Avoid cycles + visited.add(nodeId); + + const node = nodes.find(n => n.id === nodeId); + if (!node) return true; + + // Find all incoming edges to this node + const incomingEdges = edges.filter(e => e.target === nodeId); + + for (const edge of incomingEdges) { + const sourceNode = nodes.find(n => n.id === edge.source); + if (!sourceNode) continue; + + // Check if source node is disabled - skip disabled nodes + if (sourceNode.data.disabled) continue; + + // Check if source node has complete Q&A + if (!sourceNode.data.userPrompt || !sourceNode.data.response) { + return false; // Found an incomplete upstream node + } + + // Recursively check further upstream + if (!checkUpstreamNodesComplete(edge.source, visited)) { + return false; + } + } + + return true; + }; + + // Check if all active traces are complete (for main Run Node button) + const checkActiveTracesComplete = (): { complete: boolean; incompleteTraceId?: string } => { + if (!selectedNode) return { complete: true }; + + // FIRST: Always check if all upstream nodes (via edges) have complete Q&A + // This has highest priority - even if no trace is selected + if (!checkUpstreamNodesComplete(selectedNode.id)) { + return { complete: false, incompleteTraceId: 'upstream' }; + } + + const activeTraceIds = selectedNode.data.activeTraceIds || []; + if (activeTraceIds.length === 0) return { complete: true }; + + // Check incoming traces - these represent upstream context + const incomingTraces = selectedNode.data.traces || []; + for (const traceId of activeTraceIds) { + const trace = incomingTraces.find((t: Trace) => t.id === traceId); + if (trace && !isTraceComplete(trace)) { + return { complete: false, incompleteTraceId: traceId }; + } + } + + // Check outgoing traces (for originated traces) + // But for traces that THIS node originated (self trace, forked traces), + // we only need to check if there are incomplete UPSTREAM messages + // (not the current node's own messages) + const outgoingTraces = selectedNode.data.outgoingTraces || []; + for (const traceId of activeTraceIds) { + const trace = outgoingTraces.find((t: Trace) => t.id === traceId); + if (trace) { + // Filter out current node's own messages + const upstreamMessages = trace.messages.filter(m => !m.id?.startsWith(`${selectedNode.id}-`)); + + // Only check completeness if there are upstream messages + // Empty upstream means this is a head node - that's fine + if (upstreamMessages.length > 0) { + let userCount = 0; + let assistantCount = 0; + for (const msg of upstreamMessages) { + if (msg.role === 'user') userCount++; + if (msg.role === 'assistant') assistantCount++; + } + // Incomplete if unbalanced upstream messages + if (userCount !== assistantCount) { + return { complete: false, incompleteTraceId: traceId }; + } + } + // If no upstream messages, this is a head node - always complete + } + } + + // Check merged traces (all source traces must be complete) + const mergedTraces = selectedNode.data.mergedTraces || []; + for (const traceId of activeTraceIds) { + const merged = mergedTraces.find((m: MergedTrace) => m.id === traceId); + if (merged) { + for (const sourceId of 
merged.sourceTraceIds) { + const sourceTrace = incomingTraces.find((t: Trace) => t.id === sourceId); + if (sourceTrace && !isTraceComplete(sourceTrace)) { + return { complete: false, incompleteTraceId: sourceId }; + } + } + } + } + + return { complete: true }; + }; + + const activeTracesCheck = selectedNode ? checkActiveTracesComplete() : { complete: true }; + + const handleQuickChatSend = async () => { + if (!quickChatInput.trim() || !quickChatTrace || quickChatLoading || !selectedNode) return; + + const userInput = quickChatInput; + const userMessage: Message = { + id: `qc_${Date.now()}_u`, + role: 'user', + content: userInput + }; + + // Add user message to display + const messagesBeforeSend = [...quickChatMessages]; + setQuickChatMessages(prev => [...prev, userMessage]); + setQuickChatInput(''); + setQuickChatLoading(true); + + // Store model at send time to avoid issues with model switching during streaming + const modelAtSend = quickChatModel; + const tempAtSend = quickChatTemp; + const effortAtSend = quickChatEffort; + const webSearchAtSend = quickChatWebSearch; + + try { + // Determine provider + const isOpenAI = modelAtSend.includes('gpt') || modelAtSend === 'o3'; + const reasoningModels = ['gpt-5', 'gpt-5-chat-latest', 'gpt-5-mini', 'gpt-5-nano', 'gpt-5-pro', 'gpt-5.1', 'gpt-5.1-chat-latest', 'o3']; + const isReasoning = reasoningModels.includes(modelAtSend); + + // Call LLM API with current messages as context + const response = await fetch('http://localhost:8000/api/run_node_stream', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + node_id: 'quick_chat_temp', + incoming_contexts: [{ messages: messagesBeforeSend }], + user_prompt: userInput, + merge_strategy: 'smart', + config: { + provider: isOpenAI ? 'openai' : 'google', + model_name: modelAtSend, + temperature: isReasoning ? 1 : tempAtSend, + enable_google_search: webSearchAtSend, + reasoning_effort: effortAtSend, + } + }) + }); + + if (!response.body) throw new Error('No response body'); + + const reader = response.body.getReader(); + const decoder = new TextDecoder(); + let fullResponse = ''; + + // Stream response + while (true) { + const { value, done } = await reader.read(); + if (done) break; + const chunk = decoder.decode(value); + fullResponse += chunk; + + // Update display in real-time + setQuickChatMessages(prev => { + const newMsgs = [...prev]; + const lastMsg = newMsgs[newMsgs.length - 1]; + if (lastMsg?.role === 'assistant') { + // Update existing assistant message + return [...newMsgs.slice(0, -1), { ...lastMsg, content: fullResponse }]; + } else { + // Add new assistant message + return [...newMsgs, { id: `qc_${Date.now()}_a`, role: 'assistant', content: fullResponse }]; + } + }); + } + + // Determine whether to overwrite current node or create new one + // Use quickChatTrace.sourceNodeId as the "current" node in the chat flow + // This allows continuous chaining: A -> B -> C + const fromNodeId = quickChatTrace.sourceNodeId; + const fromNode = nodes.find(n => n.id === fromNodeId); + const fromNodeHasResponse = fromNode?.data.response && fromNode.data.response.trim() !== ''; + + if (!fromNodeHasResponse && fromNode) { + // Overwrite the source node (it's empty) + updateNodeData(fromNodeId, { + userPrompt: userInput, + response: fullResponse, + model: modelAtSend, + temperature: isReasoning ? 
1 : tempAtSend, + reasoningEffort: effortAtSend, + enableGoogleSearch: webSearchAtSend, + status: 'success', + querySentAt: Date.now(), + responseReceivedAt: Date.now() + }); + + // Update trace to reflect current node now has content + setQuickChatTrace(prev => prev ? { + ...prev, + messages: [...messagesBeforeSend, userMessage, { id: `qc_${Date.now()}_a`, role: 'assistant', content: fullResponse }] + } : null); + + // Generate title + generateTitle(fromNodeId, userInput, fullResponse); + } else { + // Create new node (source node has response, continue the chain) + const newNodeId = `node_${Date.now()}`; + const sourceNode = fromNode || selectedNode; + const newPos = { + x: sourceNode.position.x + 300, + y: sourceNode.position.y + }; + + const newNode = { + id: newNodeId, + type: 'llmNode', + position: newPos, + data: { + label: 'Quick Chat', + model: modelAtSend, + temperature: isReasoning ? 1 : tempAtSend, + systemPrompt: '', + userPrompt: userInput, + mergeStrategy: 'smart' as const, + reasoningEffort: effortAtSend, + enableGoogleSearch: webSearchAtSend, + traces: [], + outgoingTraces: [], + forkedTraces: [], + mergedTraces: [], + activeTraceIds: [], + response: fullResponse, + status: 'success' as const, + inputs: 1, + querySentAt: Date.now(), + responseReceivedAt: Date.now() + } + }; + + addNode(newNode); + + // Connect to the source node + setTimeout(() => { + const store = useFlowStore.getState(); + const currentEdges = store.edges; + const sourceNodeData = store.nodes.find(n => n.id === fromNodeId); + + // Find the right trace handle to use + let sourceHandle = 'new-trace'; + + // Get the base trace ID (e.g., 'trace-A' from 'trace-A_B_C' or 'new-trace-A' or 'merged-xxx') + const currentTraceId = quickChatTrace?.id || ''; + const isNewTrace = currentTraceId.startsWith('new-trace-'); + const isMergedTrace = currentTraceId.startsWith('merged-'); + + if (isMergedTrace) { + // For merged trace: find the merged trace handle on the source node + // The trace ID may have evolved (e.g., 'merged-xxx' -> 'merged-xxx_nodeA' -> 'merged-xxx_nodeA_nodeB') + // We need to find the version that ends with the current source node ID + + // First try: exact match with evolved ID (merged-xxx_sourceNodeId) + const evolvedMergedId = `${currentTraceId}_${fromNodeId}`; + let mergedOutgoing = sourceNodeData?.data.outgoingTraces?.find( + t => t.id === evolvedMergedId + ); + + // Second try: find trace that starts with merged ID and ends with this node + if (!mergedOutgoing) { + mergedOutgoing = sourceNodeData?.data.outgoingTraces?.find( + t => t.id.startsWith(currentTraceId) && t.id.endsWith(`_${fromNodeId}`) + ); + } + + // Third try: find any trace that contains the merged ID + if (!mergedOutgoing) { + mergedOutgoing = sourceNodeData?.data.outgoingTraces?.find( + t => t.id.startsWith(currentTraceId) || t.id === currentTraceId + ); + } + + // Fourth try: find any merged trace + if (!mergedOutgoing) { + mergedOutgoing = sourceNodeData?.data.outgoingTraces?.find( + t => t.id.startsWith('merged-') + ); + } + + if (mergedOutgoing) { + sourceHandle = `trace-${mergedOutgoing.id}`; + } else { + // Last resort: use the merged trace ID directly + sourceHandle = `trace-${currentTraceId}`; + } + } else if (isNewTrace) { + // For "Start New Trace": create a fresh independent trace from the original node + // First, check if this is the original starting node or a continuation node + const originalStartNodeId = currentTraceId.replace('new-trace-', ''); + const isOriginalNode = fromNodeId === originalStartNodeId; + + 
if (isOriginalNode) { + // This is the first round - starting from original node + const hasOutgoingEdges = currentEdges.some(e => e.source === fromNodeId); + if (hasOutgoingEdges) { + // Original node already has downstream - create a new fork + sourceHandle = 'new-trace'; + } else { + // No downstream yet - use self trace + const selfTrace = sourceNodeData?.data.outgoingTraces?.find( + t => t.id === `trace-${fromNodeId}` + ); + if (selfTrace) { + sourceHandle = `trace-${selfTrace.id}`; + } + } + } else { + // This is a continuation - find the evolved trace that ends at fromNodeId + // Look for a trace that was created from the original node's self trace + const matchingTrace = sourceNodeData?.data.outgoingTraces?.find(t => { + // The trace should end with fromNodeId and contain the original node + return t.id.endsWith(`_${fromNodeId}`) && t.id.includes(originalStartNodeId); + }); + + if (matchingTrace) { + sourceHandle = `trace-${matchingTrace.id}`; + } else { + // Fallback 1: Check INCOMING traces (Connect to Continue Handle) + // This is crucial because pass-through traces are not in outgoingTraces until connected + const incoming = sourceNodeData?.data.traces?.find(t => + t.id.includes(originalStartNodeId) + ); + + if (incoming) { + // Construct evolved ID for continue handle + const evolvedId = `${incoming.id}_${fromNodeId}`; + sourceHandle = `trace-${evolvedId}`; + } else { + // Fallback 2: find any trace that ends with fromNodeId + const anyMatch = sourceNodeData?.data.outgoingTraces?.find( + t => t.id === `trace-${fromNodeId}` || t.id.endsWith(`_${fromNodeId}`) + ); + if (anyMatch) { + sourceHandle = `trace-${anyMatch.id}`; + } + } + } + } + } else { + // For existing trace: find the evolved version of the original trace + const baseTraceId = currentTraceId.replace(/^trace-/, ''); + + // 1. Try OUTGOING traces first (if already connected downstream) + const matchingOutgoing = sourceNodeData?.data.outgoingTraces?.find(t => { + const traceBase = t.id.replace(/^trace-/, ''); + return traceBase.startsWith(baseTraceId) || traceBase === baseTraceId; + }); + + if (matchingOutgoing) { + sourceHandle = `trace-${matchingOutgoing.id}`; + } else { + // 2. 
Try INCOMING traces (Connect to Continue Handle) + // If we are at Node B, and currentTraceId is "trace-A", + // we look for incoming "trace-A" and use its continue handle "trace-trace-A_B" + const matchingIncoming = sourceNodeData?.data.traces?.find(t => { + const tId = t.id.replace(/^trace-/, ''); + return tId === baseTraceId || baseTraceId.startsWith(tId); + }); + + if (matchingIncoming) { + // Construct the evolved ID: {traceId}_{nodeId} + // Handle ID format in LLMNode is `trace-${evolvedTraceId}` + const evolvedId = `${matchingIncoming.id}_${fromNodeId}`; + sourceHandle = `trace-${evolvedId}`; + } + } + } + + // If this is the first message and we need to duplicate (has downstream), + // onConnect will automatically handle the trace duplication + // because the sourceHandle already has an outgoing edge + + store.onConnect({ + source: fromNodeId, + sourceHandle, + target: newNodeId, + targetHandle: 'input-0' + }); + + // After first duplication, subsequent messages continue on the new trace + // Reset the duplicate flag since we're now on the new branch + setQuickChatNeedsDuplicate(false); + + // Update trace for continued chat - use newNodeId as the new source + // Find the actual trace ID on the new node to ensure continuity + const newNode = store.nodes.find(n => n.id === newNodeId); + const currentId = quickChatTrace?.id || ''; + const isMerged = currentId.startsWith('merged-'); + const isCurrentNewTrace = currentId.startsWith('new-trace-'); + + let nextTraceId = currentId; + + if (newNode && newNode.data.outgoingTraces) { + // Find the trace that continues the current conversation + // It should end with the new node ID + + if (isMerged) { + const evolved = newNode.data.outgoingTraces.find(t => + t.id === `${currentId}_${newNodeId}` + ); + if (evolved) nextTraceId = evolved.id; + } else if (isCurrentNewTrace) { + // For new trace, we look for the trace that originated from the start node + // and now passes through the new node + const startNodeId = currentId.replace('new-trace-', ''); + const match = newNode.data.outgoingTraces.find(t => + t.id.includes(startNodeId) && t.id.endsWith(`_${newNodeId}`) + ); + if (match) nextTraceId = match.id; + // If first step (A->B), might be a direct fork ID + else { + const directFork = newNode.data.outgoingTraces.find(t => + t.id.includes(startNodeId) && t.sourceNodeId === startNodeId + ); + if (directFork) nextTraceId = directFork.id; + } + } else { + // Regular trace: look for evolved version + const baseId = currentId.replace(/^trace-/, ''); + + // 1. Try outgoing traces + const match = newNode.data.outgoingTraces.find(t => + t.id.includes(baseId) && t.id.endsWith(`_${newNodeId}`) + ); + if (match) { + nextTraceId = match.id; + } else { + // 2. If not in outgoing (no downstream yet), construct evolved ID manually + // Check if it's an incoming trace that evolved + const incoming = newNode.data.traces?.find(t => t.id.includes(baseId)); + if (incoming) { + nextTraceId = `${incoming.id}_${newNodeId}`; + } + } + } + } + + setQuickChatTrace(prev => prev ? 
{ + ...prev, + id: nextTraceId, + sourceNodeId: newNodeId, + messages: [...messagesBeforeSend, userMessage, { id: `qc_${Date.now()}_a`, role: 'assistant', content: fullResponse }] + } : null); + + // Generate title + generateTitle(newNodeId, userInput, fullResponse); + }, 100); + } + + } catch (error) { + console.error('Quick chat error:', error); + setQuickChatMessages(prev => [...prev, { + id: `qc_err_${Date.now()}`, + role: 'assistant', + content: `Error: ${error}` + }]); + } finally { + setQuickChatLoading(false); + // Refocus the input after sending + setTimeout(() => { + quickChatInputRef.current?.focus(); + }, 50); + } + }; + return ( -
+
{/* Header */} -
+
handleChange('label', e.target.value)} - className="font-bold text-lg bg-transparent border-none focus:ring-0 focus:outline-none w-full" + className={`font-bold text-lg bg-transparent border-none focus:ring-0 focus:outline-none w-full ${ + isDark ? 'text-gray-200' : 'text-gray-900' + }`} /> -
-
- {selectedNode.data.status} +
+ {selectedNode.data.status}
-
+
ID: {selectedNode.id}
{/* Tabs */} -
+
- {/* Trace Selector */} - {selectedNode.data.traces && selectedNode.data.traces.length > 0 && ( -
- + {/* Trace Selector - Single Select */} +
+
+ + {/* Create Merged Trace Button - only show if 2+ traces */} + {selectedNode.data.traces && selectedNode.data.traces.length >= 2 && ( + + )} +
+ + {/* New Trace option */} +
+
+
+ Start New Trace +
+ +
+ + {/* All Available Traces - Incoming + Outgoing that this node originated */} + {(() => { + // 1. Incoming traces (context from upstream) + const incomingTraces = selectedNode.data.traces || []; + + // 2. Outgoing traces that this node ORIGINATED (not pass-through, not merged) + // This includes self-started traces, forked traces, and prepend traces + const outgoingTraces = (selectedNode.data.outgoingTraces || []) as Trace[]; + const originatedTraces = outgoingTraces.filter(t => { + // Exclude merged traces - they have their own display section + if (t.id.startsWith('merged-')) return false; + + // Include if this node is the source (originated here) + // OR if the trace ID matches a forked/prepend trace pattern from this node + const isOriginated = t.sourceNodeId === selectedNode.id; + const isForkedHere = t.id.includes(`fork-${selectedNode.id}`); + const isSelfTrace = t.id === `trace-${selectedNode.id}`; + return isOriginated || isForkedHere || isSelfTrace; + }); + + // Combine and deduplicate by ID + // Priority: incoming traces (have full context) > originated outgoing traces + const allTracesMap = new Map(); + // Add originated outgoing traces first + originatedTraces.forEach(t => allTracesMap.set(t.id, t)); + // Then incoming traces (will overwrite if same ID, as they have fuller context) + incomingTraces.forEach(t => allTracesMap.set(t.id, t)); + const allTraces = Array.from(allTracesMap.values()); + + if (allTraces.length === 0) return null; + + return (
- {selectedNode.data.traces.map((trace) => { + {allTraces.map((trace: Trace) => { const isActive = selectedNode.data.activeTraceIds?.includes(trace.id); + const isComplete = canQuickChat(trace); + return ( -
{ - const current = selectedNode.data.activeTraceIds || []; - const next = [trace.id]; // Single select mode - handleChange('activeTraceIds', next); - }} +
handleChange('activeTraceIds', [trace.id])} + className={`flex items-start gap-2 text-sm p-1.5 rounded group cursor-pointer transition-all ${ + isActive + ? isDark ? 'bg-blue-900/50 border border-blue-700' : 'bg-blue-50 border border-blue-200' + : isDark ? 'hover:bg-gray-800' : 'hover:bg-white' + }`} > = ({ isOpen, onToggle }) => { readOnly className="mt-1" /> +
-
-
- #{trace.id.slice(-4)} -
-
- From Node: {trace.sourceNodeId} -
-
- {trace.messages.length} msgs -
+
+
+ #{trace.id.slice(-4)} + {!isComplete && ( + (incomplete) + )} +
+
+ {trace.messages.length} msgs +
+ + {/* Quick Chat Button */} + {(() => { + const hasDownstream = edges.some(e => + e.source === selectedNode.id && + e.sourceHandle?.startsWith('trace-') + ); + const buttonLabel = hasDownstream ? "Duplicate & Quick Chat" : "Quick Chat"; + + return ( + + ); + })()}
); })}
-
- )} + ); + })()} + + {/* Merged Traces - also single selectable */} + {selectedNode.data.mergedTraces && selectedNode.data.mergedTraces.length > 0 && ( +
+ + {selectedNode.data.mergedTraces.map((merged: MergedTrace) => { + const isActive = selectedNode.data.activeTraceIds?.includes(merged.id); + + return ( +
handleChange('activeTraceIds', [merged.id])} + className={`flex items-center gap-2 p-1.5 rounded text-xs cursor-pointer transition-all ${ + isActive + ? isDark ? 'bg-purple-900/50 border border-purple-600' : 'bg-purple-50 border border-purple-300' + : isDark ? 'bg-gray-800 hover:bg-gray-700' : 'bg-white border border-gray-200 hover:bg-gray-50' + }`} + > + + + {/* Alternating color indicator */} +
+ {merged.colors.slice(0, 3).map((color, idx) => ( +
+ ))} + {merged.colors.length > 3 && ( +
+ +{merged.colors.length - 3} +
+ )} +
+ +
+
+ Merged #{merged.id.slice(-6)} +
+
+ {merged.strategy} • {merged.messages.length} msgs +
+
+ + {/* Quick Chat for Merged Trace */} + + + +
+ ); + })} +
+ )} +
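For readers tracing the merge logic in this file: createMergedTrace and computeMergedMessages come from flowStore.ts, and the three MergeStrategy values used above suggest that 'trace_order' keeps each selected trace intact and concatenates them in the drag-and-drop order, 'query_time' interleaves messages chronologically, and 'summary' replaces the combined history with a model-written summary via the /api/summarize call visible in handleCreateMergedTrace. A rough sketch of the two non-summary cases, assuming hypothetical Message/Trace shapes and a per-message sentAt timestamp (the diff above only exposes node-level querySentAt/responseReceivedAt), not the store's actual implementation:

// Illustrative only - not the flowStore implementation.
type Role = 'user' | 'assistant';
interface Message { id: string; role: Role; content: string; sentAt?: number } // sentAt is assumed
interface Trace { id: string; sourceNodeId: string; color: string; messages: Message[] }
type MergeStrategy = 'query_time' | 'trace_order' | 'summary';

function mergeTraceMessages(traces: Trace[], strategy: MergeStrategy): Message[] {
  if (strategy === 'trace_order') {
    // Keep each conversation intact, appended in the user-chosen order.
    return traces.flatMap(t => t.messages);
  }
  if (strategy === 'query_time') {
    // Interleave all messages by send time; untimestamped messages sort first
    // but keep their relative order (Array.prototype.sort is stable).
    return traces
      .flatMap(t => t.messages)
      .sort((a, b) => (a.sentAt ?? 0) - (b.sentAt ?? 0));
  }
  // 'summary' is resolved asynchronously upstream (handleCreateMergedTrace),
  // so fall back to plain concatenation here.
  return traces.flatMap(t => t.messages);
}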