summary refs log tree commit diff
path: root/frontend/src/store/flowStore.ts
diff options
context:
space:
mode:
Diffstat (limited to 'frontend/src/store/flowStore.ts')
-rw-r--r--  frontend/src/store/flowStore.ts  2154
1 file changed, 2037 insertions, 117 deletions
diff --git a/frontend/src/store/flowStore.ts b/frontend/src/store/flowStore.ts
index d2114aa..a919498 100644
--- a/frontend/src/store/flowStore.ts
+++ b/frontend/src/store/flowStore.ts
@@ -10,17 +10,42 @@ import {
type NodeChange,
type OnNodesChange,
type OnEdgesChange,
- type OnConnect,
- getIncomers,
- getOutgoers
+ type OnConnect
} from 'reactflow';
+// --- Project / Blueprint types ---
+export interface ViewportState {
+ x: number;
+ y: number;
+ zoom: number;
+}
+
+export interface BlueprintDocument {
+ version: number;
+ nodes: Node<NodeData>[];
+ edges: Edge[];
+ viewport?: ViewportState;
+ theme?: 'light' | 'dark';
+}
+
+export interface FSItem {
+ name: string;
+ path: string; // path relative to user root
+ type: 'file' | 'folder';
+ size?: number | null;
+ mtime?: number | null;
+ children?: FSItem[];
+}
+
export type NodeStatus = 'idle' | 'loading' | 'success' | 'error';
+export type SaveStatus = 'idle' | 'saving' | 'saved' | 'error';
export interface Message {
id?: string;
role: 'user' | 'assistant' | 'system';
content: string;
+ sourceTraceId?: string; // For merged traces: which trace this message came from
+ sourceTraceColor?: string; // For merged traces: color of the source trace
}
export interface Trace {
@@ -28,6 +53,24 @@ export interface Trace {
sourceNodeId: string;
color: string;
messages: Message[];
+ // Optional merged trace info for visual propagation
+ isMerged?: boolean;
+ mergedColors?: string[];
+ sourceTraceIds?: string[];
+}
+
+// Merge strategy types
+export type MergeStrategy = 'query_time' | 'response_time' | 'trace_order' | 'grouped' | 'interleaved' | 'summary';
+
+// Merged trace - combines multiple traces with a strategy
+export interface MergedTrace {
+ id: string;
+ sourceNodeId: string;
+ sourceTraceIds: string[]; // IDs of traces being merged (in order for trace_order)
+ strategy: MergeStrategy;
+ colors: string[]; // Colors from source traces (for alternating display)
+ messages: Message[]; // Computed merged messages
+ summarizedContent?: string; // For summary strategy, stores the LLM-generated summary
}
export interface NodeData {
@@ -38,31 +81,85 @@ export interface NodeData {
systemPrompt: string;
userPrompt: string;
mergeStrategy: 'raw' | 'smart';
+ enableGoogleSearch?: boolean;
+ reasoningEffort: 'low' | 'medium' | 'high'; // For OpenAI reasoning models
+ attachedFileIds?: string[]; // IDs of files attached to this node
+ disabled?: boolean; // Greyed out, no interaction
// Traces logic
traces: Trace[]; // INCOMING Traces
- outgoingTraces: Trace[]; // ALL Outgoing (inherited + self + forks)
+ outgoingTraces: Trace[]; // ALL Outgoing (inherited + self + forks + merged)
forkedTraces: Trace[]; // Manually created forks from "New" handle
+ mergedTraces: MergedTrace[]; // Merged traces from multiple inputs
activeTraceIds: string[];
response: string;
status: NodeStatus;
- inputs: number;
+ inputs: number;
+
+ // Timestamps for merge logic
+ querySentAt?: number; // Unix timestamp when query was sent
+ responseReceivedAt?: number; // Unix timestamp when response was received
+
[key: string]: any;
}
export type LLMNode = Node<NodeData>;
+// Archived node template (for reuse)
+export interface ArchivedNode {
+ id: string;
+ label: string;
+ model: string;
+ systemPrompt: string;
+ temperature: number;
+ reasoningEffort: 'low' | 'medium' | 'high';
+ userPrompt?: string;
+ response?: string;
+ enableGoogleSearch?: boolean;
+ attachedFileIds?: string[];
+ mergeStrategy?: 'raw' | 'smart';
+}
+
+export interface FileMeta {
+ id: string;
+ name: string;
+ size: number;
+ mime: string;
+ created_at: number;
+ provider?: string;
+ provider_file_id?: string;
+ scopes?: string[]; // "project_path/node_id" composite keys
+}
+
+export type UploadFileProvider = 'local' | 'openai' | 'google';
+
+export interface UploadFileOptions {
+ provider?: UploadFileProvider;
+ purpose?: string;
+}
+
interface FlowState {
nodes: LLMNode[];
edges: Edge[];
selectedNodeId: string | null;
+ archivedNodes: ArchivedNode[]; // Stored node templates
+ files: FileMeta[];
+ uploadingFileIds?: string[];
+ theme: 'light' | 'dark';
+ projectTree: FSItem[];
+ currentBlueprintPath?: string;
+ lastViewport?: ViewportState;
+ saveStatus: SaveStatus;
onNodesChange: OnNodesChange;
onEdgesChange: OnEdgesChange;
onConnect: OnConnect;
addNode: (node: LLMNode) => void;
+ toggleTheme: () => void;
+ autoLayout: () => void;
+ findNonOverlappingPosition: (baseX: number, baseY: number) => { x: number; y: number };
updateNodeData: (nodeId: string, data: Partial<NodeData>) => void;
setSelectedNode: (nodeId: string | null) => void;
@@ -72,6 +169,71 @@ interface FlowState {
deleteEdge: (edgeId: string) => void;
deleteNode: (nodeId: string) => void;
deleteBranch: (startNodeId?: string, startEdgeId?: string) => void;
+ deleteTrace: (startEdgeId: string) => void;
+
+ // Archive actions
+ toggleNodeDisabled: (nodeId: string) => void;
+ archiveNode: (nodeId: string) => void;
+ removeFromArchive: (archiveId: string) => void;
+ createNodeFromArchive: (archiveId: string, position: { x: number; y: number }) => void;
+
+ // Trace disable
+ toggleTraceDisabled: (edgeId: string) => void;
+ updateEdgeStyles: () => void;
+
+ // Quick Chat helpers
+ isTraceComplete: (trace: Trace) => boolean;
+ getTraceNodeIds: (trace: Trace) => string[];
+ createQuickChatNode: (
+ fromNodeId: string,
+ trace: Trace | null,
+ userPrompt: string,
+ response: string,
+ model: string,
+ config: Partial<NodeData>
+ ) => string; // Returns new node ID
+
+ // Blueprint serialization / persistence
+ serializeBlueprint: (viewport?: ViewportState) => BlueprintDocument;
+ loadBlueprint: (doc: BlueprintDocument) => ViewportState | undefined;
+ saveBlueprintFile: (path: string, viewport?: ViewportState) => Promise<void>;
+ readBlueprintFile: (path: string) => Promise<BlueprintDocument>;
+ refreshProjectTree: () => Promise<FSItem[]>;
+ createProjectFolder: (path: string) => Promise<void>;
+ renameProjectItem: (path: string, newName?: string, newPath?: string) => Promise<void>;
+ deleteProjectItem: (path: string, isFolder?: boolean) => Promise<void>;
+ setCurrentBlueprintPath: (path?: string) => void;
+ setLastViewport: (viewport: ViewportState) => void;
+ saveCurrentBlueprint: (path?: string, viewport?: ViewportState) => Promise<void>;
+ clearBlueprint: () => void;
+ loadArchivedNodes: () => Promise<void>;
+ saveArchivedNodes: () => Promise<void>;
+ refreshFiles: () => Promise<void>;
+ uploadFile: (file: File, options?: UploadFileOptions) => Promise<FileMeta>;
+ deleteFile: (fileId: string) => Promise<void>;
+ setFiles: (files: FileMeta[]) => void;
+ setUploading: (ids: string[]) => void;
+ addFileScope: (fileId: string, scope: string) => Promise<void>;
+ removeFileScope: (fileId: string, scope: string) => Promise<void>;
+
+ // Merge trace functions
+ createMergedTrace: (
+ nodeId: string,
+ sourceTraceIds: string[],
+ strategy: MergeStrategy
+ ) => string; // Returns merged trace ID
+ updateMergedTrace: (
+ nodeId: string,
+ mergedTraceId: string,
+ updates: { sourceTraceIds?: string[]; strategy?: MergeStrategy; summarizedContent?: string }
+ ) => void;
+ deleteMergedTrace: (nodeId: string, mergedTraceId: string) => void;
+ computeMergedMessages: (
+ nodeId: string,
+ sourceTraceIds: string[],
+ strategy: MergeStrategy,
+ tracesOverride?: Trace[]
+ ) => Message[];
propagateTraces: () => void;
}
@@ -86,15 +248,203 @@ const getStableColor = (str: string) => {
return `hsl(${hue}, 70%, 60%)`;
};
-const useFlowStore = create<FlowState>((set, get) => ({
+import { useAuthStore } from './authStore';
+
+const API_BASE = import.meta.env.VITE_BACKEND_URL || '';
+const DEFAULT_USER = 'test'; // Fallback for unauthenticated requests
+
+// Get current username directly from authStore
+const getCurrentUser = () => {
+ const authState = useAuthStore.getState();
+ return authState.user?.username || DEFAULT_USER;
+};
+
+// Get auth headers directly from authStore
+const getAuthHeaders = (): Record<string, string> => {
+ const authState = useAuthStore.getState();
+ if (authState.token) {
+ return { Authorization: `Bearer ${authState.token}` };
+ }
+ return {};
+};
+
+const jsonFetch = async <T>(url: string, options?: RequestInit): Promise<T> => {
+ const authHeaders = getAuthHeaders();
+ const res = await fetch(url, {
+ ...options,
+ headers: {
+ ...options?.headers,
+ ...authHeaders,
+ },
+ });
+ if (!res.ok) {
+ const detail = await res.text();
+ throw new Error(detail || `Request failed: ${res.status}`);
+ }
+ return res.json() as Promise<T>;
+};
+
+const useFlowStore = create<FlowState>((set, get) => {
+
+ const validateBlueprint = (doc: any): BlueprintDocument => {
+ if (!doc || typeof doc !== 'object') throw new Error('Invalid blueprint: not an object');
+ if (typeof doc.version !== 'number') throw new Error('Invalid blueprint: missing version');
+ if (!Array.isArray(doc.nodes) || !Array.isArray(doc.edges)) throw new Error('Invalid blueprint: nodes/edges missing');
+ return doc as BlueprintDocument;
+ };
+
+ return {
nodes: [],
edges: [],
selectedNodeId: null,
+ archivedNodes: [],
+ files: [],
+ uploadingFileIds: [],
+ theme: 'light' as const,
+ projectTree: [],
+ currentBlueprintPath: undefined,
+ lastViewport: undefined,
+ saveStatus: 'idle',
+
+ toggleTheme: () => {
+ const newTheme = get().theme === 'light' ? 'dark' : 'light';
+ set({ theme: newTheme });
+ // Update document class for global CSS
+ if (newTheme === 'dark') {
+ document.documentElement.classList.add('dark');
+ } else {
+ document.documentElement.classList.remove('dark');
+ }
+ },
+
+ setLastViewport: (viewport: ViewportState) => {
+ set({ lastViewport: viewport });
+ },
+
+ setFiles: (files: FileMeta[]) => {
+ set({ files });
+ },
+ setUploading: (ids: string[]) => {
+ set({ uploadingFileIds: ids });
+ },
+ findNonOverlappingPosition: (baseX: number, baseY: number) => {
+ const { nodes } = get();
+ // Estimate larger dimensions to be safe, considering dynamic handles
+ const nodeWidth = 300;
+ const nodeHeight = 200;
+ const padding = 20;
+
+ let x = baseX;
+ let y = baseY;
+ let attempts = 0;
+ const maxAttempts = 100; // Increase attempts
+
+ const isOverlapping = (testX: number, testY: number) => {
+ return nodes.some(node => {
+ const nodeX = node.position.x;
+ const nodeY = node.position.y;
+ return !(testX + nodeWidth + padding < nodeX ||
+ testX > nodeX + nodeWidth + padding ||
+ testY + nodeHeight + padding < nodeY ||
+ testY > nodeY + nodeHeight + padding);
+ });
+ };
+
+ // Try positions in a spiral pattern
+ while (isOverlapping(x, y) && attempts < maxAttempts) {
+ attempts++;
+ // Spiral parameters
+ const angle = attempts * 0.5; // Slower rotation
+ const radius = 50 + attempts * 30; // Faster expansion
+
+ x = baseX + Math.cos(angle) * radius;
+ y = baseY + Math.sin(angle) * radius;
+ }
+
+ return { x, y };
+ },
+
+ autoLayout: () => {
+ const { nodes, edges } = get();
+ if (nodes.length === 0) return;
+
+ // Find root nodes (no incoming edges)
+ const nodesWithIncoming = new Set(edges.map(e => e.target));
+ const rootNodes = nodes.filter(n => !nodesWithIncoming.has(n.id));
+
+ // BFS to layout nodes in levels
+ const nodePositions: Map<string, { x: number; y: number }> = new Map();
+ const visited = new Set<string>();
+ const queue: { id: string; level: number; index: number }[] = [];
+
+ const horizontalSpacing = 350;
+ const verticalSpacing = 150;
+
+ // Initialize with root nodes
+ rootNodes.forEach((node, index) => {
+ queue.push({ id: node.id, level: 0, index });
+ visited.add(node.id);
+ });
+
+ // Track nodes per level for vertical positioning
+ const nodesPerLevel: Map<number, number> = new Map();
+
+ while (queue.length > 0) {
+ const { id, level } = queue.shift()!;
+
+ // Count nodes at this level
+ const currentCount = nodesPerLevel.get(level) || 0;
+ nodesPerLevel.set(level, currentCount + 1);
+
+ // Calculate position
+ const x = 100 + level * horizontalSpacing;
+ const y = 100 + currentCount * verticalSpacing;
+ nodePositions.set(id, { x, y });
+
+ // Find child nodes
+ const outgoingEdges = edges.filter(e => e.source === id);
+ outgoingEdges.forEach((edge, i) => {
+ if (!visited.has(edge.target)) {
+ visited.add(edge.target);
+ queue.push({ id: edge.target, level: level + 1, index: i });
+ }
+ });
+ }
+
+ // Handle orphan nodes (not connected to anything)
+ let orphanY = 100;
+ nodes.forEach(node => {
+ if (!nodePositions.has(node.id)) {
+ nodePositions.set(node.id, { x: 100, y: orphanY });
+ orphanY += verticalSpacing;
+ }
+ });
+
+ // Apply positions
+ set({
+ nodes: nodes.map(node => ({
+ ...node,
+ position: nodePositions.get(node.id) || node.position
+ }))
+ });
+ },
onNodesChange: (changes: NodeChange[]) => {
+ // Check if any nodes are being removed
+ const hasRemovals = changes.some(c => c.type === 'remove');
+
set({
nodes: applyNodeChanges(changes, get().nodes) as LLMNode[],
});
+
+ // If nodes were removed, also clean up related edges and propagate traces
+ if (hasRemovals) {
+ const removedIds = changes.filter(c => c.type === 'remove').map(c => c.id);
+ set({
+ edges: get().edges.filter(e => !removedIds.includes(e.source) && !removedIds.includes(e.target))
+ });
+ get().propagateTraces();
+ }
},
onEdgesChange: (changes: EdgeChange[]) => {
set({
@@ -103,52 +453,447 @@ const useFlowStore = create<FlowState>((set, get) => ({
get().propagateTraces();
},
onConnect: (connection: Connection) => {
- const { nodes } = get();
+ const { nodes, edges } = get();
+ const sourceNode = nodes.find(n => n.id === connection.source);
+ if (!sourceNode) return;
- // Check if connecting from "new-trace" handle
- if (connection.sourceHandle === 'new-trace') {
- // Logic: Create a new Forked Trace on the source node
- const sourceNode = nodes.find(n => n.id === connection.source);
- if (sourceNode) {
- // Generate the content for this new trace (it's essentially the Self Trace of this node)
- const myResponseMsg: Message[] = [];
- if (sourceNode.data.userPrompt) myResponseMsg.push({ id: `${sourceNode.id}-u`, role: 'user', content: sourceNode.data.userPrompt });
- if (sourceNode.data.response) myResponseMsg.push({ id: `${sourceNode.id}-a`, role: 'assistant', content: sourceNode.data.response });
-
- const newForkId = `trace-${sourceNode.id}-fork-${Date.now()}`;
- const newForkTrace: Trace = {
- id: newForkId,
- sourceNodeId: sourceNode.id,
- color: getStableColor(newForkId), // Unique color for this fork
- messages: [...myResponseMsg]
- };
-
- // Update Source Node to include this fork
- get().updateNodeData(sourceNode.id, {
- forkedTraces: [...(sourceNode.data.forkedTraces || []), newForkTrace]
+ // Handle prepend connections - only allow from 'new-trace' handle
+ // Prepend makes the source node become the NEW HEAD of an existing trace
+ if (connection.targetHandle?.startsWith('prepend-')) {
+ // Prepend connections MUST come from 'new-trace' handle
+ if (connection.sourceHandle !== 'new-trace') {
+ console.warn('Prepend connections only allowed from new-trace handle');
+ return;
+ }
+
+ const targetNode = nodes.find(n => n.id === connection.target);
+ if (!targetNode) return;
+
+ // Get the trace ID from the prepend handle - this is the trace we're joining
+ const traceId = connection.targetHandle.replace('prepend-', '').replace('inherited-', '');
+ const regularTrace = targetNode.data.outgoingTraces?.find(t => t.id === traceId);
+ const mergedTrace = targetNode.data.mergedTraces?.find(m => m.id === traceId);
+ const traceColor = regularTrace?.color || (mergedTrace?.colors?.[0]) || getStableColor(traceId);
+
+ // Instead of creating a new trace, we JOIN the existing trace
+ // The source node (C) becomes the new head of the trace
+ // We use the SAME trace ID so it's truly the same trace
+
+ // Add this trace as a "forked trace" on the source node so it shows up as an output handle
+ // But use the ORIGINAL trace ID (not a new prepend-xxx ID)
+ const inheritedTrace: Trace = {
+ id: traceId, // Use the SAME trace ID!
+ sourceNodeId: sourceNode.id, // C is now the source/head
+ color: traceColor,
+ messages: [] // Will be populated by propagateTraces
+ };
+
+ // Add to source node's forkedTraces
+ get().updateNodeData(sourceNode.id, {
+ forkedTraces: [...(sourceNode.data.forkedTraces || []), inheritedTrace]
+ });
+
+ // Update target node's forkedTrace to mark it as "prepended"
+ // by setting sourceNodeId to the new head (source node)
+ const targetForked = targetNode.data.forkedTraces || [];
+ const updatedForked = targetForked.map(t =>
+ t.id === traceId ? { ...t, sourceNodeId: sourceNode.id } : t
+ );
+ if (JSON.stringify(updatedForked) !== JSON.stringify(targetForked)) {
+ get().updateNodeData(targetNode.id, {
+ forkedTraces: updatedForked
+ });
+ }
+
+ // Create the edge using the SAME trace ID
+ // This connects C's output to A's prepend input
+ set({
+ edges: addEdge({
+ ...connection,
+ sourceHandle: `trace-${traceId}`, // Same trace ID!
+ style: { stroke: traceColor, strokeWidth: 2 }
+ }, get().edges),
+ });
+
+ setTimeout(() => get().propagateTraces(), 0);
+ return;
+ }
+
+ // Helper to trace back the path of a trace by following edges upstream
+ const duplicateTracePath = (
+ traceId: string,
+ forkAtNodeId: string,
+ traceOwnerNodeId?: string,
+ pendingEdges: Edge[] = []
+ ): { newTraceId: string, newEdges: Edge[], firstNodeId: string } | null => {
+ // Trace back from forkAtNodeId to find the origin of this trace
+ // We follow incoming edges that match the trace pattern
+
+ const pathNodes: string[] = [forkAtNodeId];
+ const pathEdges: Edge[] = [];
+ let currentNodeId = forkAtNodeId;
+
+ // Trace backwards through incoming edges
+ while (true) {
+ // Find incoming edge to current node that carries THIS trace ID
+ const incomingEdge = edges.find(e =>
+ e.target === currentNodeId &&
+ e.sourceHandle === `trace-${traceId}`
+ );
+
+ if (!incomingEdge) break; // Reached the start of the trace
+
+ pathNodes.unshift(incomingEdge.source);
+ pathEdges.unshift(incomingEdge);
+ currentNodeId = incomingEdge.source;
+ }
+
+ // If path only has one node, no upstream to duplicate
+ if (pathNodes.length <= 1) return null;
+
+ const firstNodeId = pathNodes[0];
+ const firstNode = nodes.find(n => n.id === firstNodeId);
+ if (!firstNode) return null;
+
+ // Create a new trace ID for the duplicated path (guarantee uniqueness even within the same ms)
+ const uniq = `${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
+ const newTraceId = `fork-${firstNodeId}-${uniq}`;
+ const newTraceColor = getStableColor(newTraceId);
+
+ // Create new edges for the entire path
+ const newEdges: Edge[] = [];
+
+ // Track which input handles we're creating for new edges
+ const newInputHandles: Map<string, number> = new Map();
+
+ for (let i = 0; i < pathEdges.length; i++) {
+ const fromNodeId = pathNodes[i];
+ const toNodeId = pathNodes[i + 1];
+
+ // Find the next available input handle for the target node
+ // Count existing edges to this node + any new edges we're creating
+ const existingEdgesToTarget =
+ edges.filter(e => e.target === toNodeId).length +
+ pendingEdges.filter(e => e.target === toNodeId).length;
+ const newEdgesToTarget = newInputHandles.get(toNodeId) || 0;
+ const nextInputIndex = existingEdgesToTarget + newEdgesToTarget;
+ newInputHandles.set(toNodeId, newEdgesToTarget + 1);
+
+ newEdges.push({
+ id: `edge-fork-${uniq}-${i}`,
+ source: fromNodeId,
+ target: toNodeId,
+ sourceHandle: `trace-${newTraceId}`,
+ targetHandle: `input-${nextInputIndex}`,
+ style: { stroke: newTraceColor, strokeWidth: 2 }
+ });
+ }
+
+ // Find the messages up to the fork point
+ const traceOwnerNode = traceOwnerNodeId
+ ? nodes.find(n => n.id === traceOwnerNodeId)
+ : sourceNode;
+ if (!traceOwnerNode) return null;
+
+ const originalTrace = traceOwnerNode.data.outgoingTraces?.find(t => t.id === traceId);
+ const messagesUpToFork = originalTrace?.messages || [];
+
+ // Add the new trace as a forked trace on the first node
+ const newForkTrace: Trace = {
+ id: newTraceId,
+ sourceNodeId: firstNodeId,
+ color: newTraceColor,
+ messages: [...messagesUpToFork]
+ };
+
+ get().updateNodeData(firstNodeId, {
+ forkedTraces: [...(firstNode.data.forkedTraces || []), newForkTrace]
+ });
+
+ return { newTraceId, newEdges, firstNodeId };
+ };
+
+ // Helper to duplicate the downstream segment of a trace from a start node to an end node
+ const duplicateDownstreamSegment = (
+ originalTraceId: string,
+ startNodeId: string,
+ endNodeId: string,
+ newTraceId: string,
+ newTraceColor: string,
+ newTraceColors: string[]
+ ): Edge[] | null => {
+ const segmentEdges: Edge[] = [];
+ let currentNodeId = startNodeId;
+ const visitedEdgeIds = new Set<string>();
+
+ while (currentNodeId !== endNodeId) {
+ const nextEdge = edges.find(
+ (e) => e.source === currentNodeId && e.sourceHandle === `trace-${originalTraceId}`
+ );
+
+ if (!nextEdge || visitedEdgeIds.has(nextEdge.id)) {
+ return null;
+ }
+
+ segmentEdges.push(nextEdge);
+ visitedEdgeIds.add(nextEdge.id);
+ currentNodeId = nextEdge.target;
+ }
+
+ const newEdges: Edge[] = [];
+ const newInputCounts: Map<string, number> = new Map();
+ const segmentTimestamp = Date.now();
+
+ segmentEdges.forEach((edge, index) => {
+ const targetNodeId = edge.target;
+ const existingEdgesToTarget = edges.filter((e) => e.target === targetNodeId).length;
+ const additionalEdges = newInputCounts.get(targetNodeId) || 0;
+ const nextInputIndex = existingEdgesToTarget + additionalEdges;
+ newInputCounts.set(targetNodeId, additionalEdges + 1);
+
+ newEdges.push({
+ id: `edge-merged-seg-${segmentTimestamp}-${index}`,
+ source: edge.source,
+ target: edge.target,
+ sourceHandle: `trace-${newTraceId}`,
+ targetHandle: `input-${nextInputIndex}`,
+ type: 'merged',
+ style: { stroke: newTraceColor, strokeWidth: 2 },
+ data: { isMerged: true, colors: newTraceColors }
+ });
+ });
+
+ return newEdges;
+ };
+
+ // Helper to duplicate a merged trace by cloning its parent traces and creating a new merged branch
+ const duplicateMergedTraceBranch = (
+ mergedTrace: Trace,
+ forkAtNodeId: string
+ ): { newTraceId: string; newEdges: Edge[]; color: string } | null => {
+ const mergeNodeId = mergedTrace.sourceNodeId;
+ const mergeNode = nodes.find((n) => n.id === mergeNodeId);
+ if (!mergeNode) return null;
+
+ const mergedDef =
+ mergeNode.data.mergedTraces?.find((m: MergedTrace) => m.id === mergedTrace.id) || null;
+ const parentTraceIds = mergedTrace.sourceTraceIds || mergedDef?.sourceTraceIds || [];
+ if (parentTraceIds.length === 0) return null;
+
+ let accumulatedEdges: Edge[] = [];
+ const newParentTraceIds: string[] = [];
+ const parentOverrides: Trace[] = [];
+
+ for (const parentId of parentTraceIds) {
+ const originalParentTrace = mergeNode.data.traces?.find((t: Trace) => t.id === parentId);
+
+ if (originalParentTrace?.isMerged && originalParentTrace.sourceTraceIds?.length) {
+ const nestedDuplicate = duplicateMergedTraceBranch(originalParentTrace, mergeNodeId);
+ if (!nestedDuplicate) {
+ return null;
+ }
+ accumulatedEdges = accumulatedEdges.concat(nestedDuplicate.newEdges);
+ newParentTraceIds.push(nestedDuplicate.newTraceId);
+
+ parentOverrides.push({
+ ...originalParentTrace,
+ id: nestedDuplicate.newTraceId,
});
-
- // Redirect connection to the new handle
- // Note: We must wait for propagateTraces to render the new handle?
- // ReactFlow might complain if handle doesn't exist yet.
- // But since we updateNodeData synchronously (mostly), it might work.
- // Let's use the new ID for the connection.
-
+
+ continue;
+ }
+
+ const duplicateResult = duplicateTracePath(parentId, mergeNodeId, mergeNodeId, accumulatedEdges);
+ if (!duplicateResult) {
+ return null;
+ }
+ accumulatedEdges = accumulatedEdges.concat(duplicateResult.newEdges);
+ newParentTraceIds.push(duplicateResult.newTraceId);
+
+ if (originalParentTrace) {
+ parentOverrides.push({
+ ...originalParentTrace,
+ id: duplicateResult.newTraceId,
+ });
+ }
+ }
+
+ const strategy = mergedDef?.strategy || 'trace_order';
+ const uniqMerged = `${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
+ const newMergedId = `merged-${mergeNodeId}-${uniqMerged}`;
+ const newColors =
+ parentOverrides.length > 0
+ ? parentOverrides.map((t) => t.color).filter((c): c is string => Boolean(c))
+ : mergedTrace.mergedColors ?? [];
+
+ const overrideTraces = parentOverrides.length > 0 ? parentOverrides : undefined;
+ const mergedMessages = get().computeMergedMessages(
+ mergeNodeId,
+ newParentTraceIds,
+ strategy,
+ overrideTraces
+ );
+
+ const newMergedDefinition: MergedTrace = {
+ id: newMergedId,
+ sourceNodeId: mergeNodeId,
+ sourceTraceIds: newParentTraceIds,
+ strategy,
+ colors: newColors.length ? newColors : [mergedTrace.color],
+ messages: mergedMessages,
+ };
+
+ const existingMerged = mergeNode.data.mergedTraces || [];
+ get().updateNodeData(mergeNodeId, {
+ mergedTraces: [...existingMerged, newMergedDefinition],
+ });
+
+ const newMergedColor = newColors[0] || mergedTrace.color || getStableColor(newMergedId);
+ const downstreamEdges = duplicateDownstreamSegment(
+ mergedTrace.id,
+ mergeNodeId,
+ forkAtNodeId,
+ newMergedId,
+ newMergedColor,
+ newColors.length ? newColors : [mergedTrace.color]
+ );
+ if (!downstreamEdges) return null;
+
+ accumulatedEdges = accumulatedEdges.concat(downstreamEdges);
+
+ return {
+ newTraceId: newMergedId,
+ newEdges: accumulatedEdges,
+ color: newMergedColor,
+ };
+ };
+
+ // Helper to create a simple forked trace (for new-trace handle or first connection)
+ const createSimpleForkTrace = () => {
+ let originalTraceMessages: Message[] = [];
+
+ if (connection.sourceHandle?.startsWith('trace-')) {
+ const originalTraceId = connection.sourceHandle.replace('trace-', '');
+ const originalTrace = sourceNode.data.outgoingTraces?.find(t => t.id === originalTraceId);
+ if (originalTrace) {
+ originalTraceMessages = [...originalTrace.messages];
+ }
+ }
+
+ if (originalTraceMessages.length === 0) {
+ if (sourceNode.data.userPrompt) {
+ originalTraceMessages.push({ id: `${sourceNode.id}-u`, role: 'user', content: sourceNode.data.userPrompt });
+ }
+ if (sourceNode.data.response) {
+ originalTraceMessages.push({ id: `${sourceNode.id}-a`, role: 'assistant', content: sourceNode.data.response });
+ }
+ }
+
+ const newForkId = `fork-${sourceNode.id}-${Date.now()}`;
+ const newForkTrace: Trace = {
+ id: newForkId,
+ sourceNodeId: sourceNode.id,
+ color: getStableColor(newForkId),
+ messages: originalTraceMessages
+ };
+
+ get().updateNodeData(sourceNode.id, {
+ forkedTraces: [...(sourceNode.data.forkedTraces || []), newForkTrace]
+ });
+
+ return newForkTrace;
+ };
+
+ // Check if connecting from "new-trace" handle - always create simple fork
+ if (connection.sourceHandle === 'new-trace') {
+ const newForkTrace = createSimpleForkTrace();
+
+ set({
+ edges: addEdge({
+ ...connection,
+ sourceHandle: `trace-${newForkTrace.id}`,
+ style: { stroke: newForkTrace.color, strokeWidth: 2 }
+ }, get().edges),
+ });
+
+ setTimeout(() => get().propagateTraces(), 0);
+ return;
+ }
+
+ // Check if this trace handle already has a downstream connection
+ const existingEdgeFromHandle = edges.find(
+ e => e.source === connection.source && e.sourceHandle === connection.sourceHandle
+ );
+
+ if (existingEdgeFromHandle && connection.sourceHandle?.startsWith('trace-')) {
+ const originalTraceId = connection.sourceHandle.replace('trace-', '');
+ const traceMeta = sourceNode.data.outgoingTraces?.find((t: Trace) => t.id === originalTraceId);
+
+ if (traceMeta?.isMerged && traceMeta.sourceTraceIds && traceMeta.sourceTraceIds.length > 0) {
+ const mergedDuplicate = duplicateMergedTraceBranch(traceMeta, connection.source!);
+ if (mergedDuplicate) {
set({
- edges: addEdge({
- ...connection,
- sourceHandle: `trace-${newForkId}`, // Redirect!
- style: { stroke: newForkTrace.color, strokeWidth: 2 }
- }, get().edges),
+ edges: [
+ ...get().edges,
+ ...mergedDuplicate.newEdges,
+ {
+ id: `edge-${connection.source}-${connection.target}-${Date.now()}`,
+ source: connection.source!,
+ target: connection.target!,
+ sourceHandle: `trace-${mergedDuplicate.newTraceId}`,
+ targetHandle: connection.targetHandle,
+ type: 'merged',
+ style: { stroke: mergedDuplicate.color, strokeWidth: 2 },
+ data: { isMerged: true, colors: traceMeta.mergedColors || [] }
+ } as Edge
+ ],
});
- // Trigger propagation to update downstream
setTimeout(() => get().propagateTraces(), 0);
return;
- }
+ }
+ }
+
+ const duplicateResult = duplicateTracePath(originalTraceId, connection.source!);
+
+ if (duplicateResult) {
+ const { newTraceId, newEdges } = duplicateResult;
+ const newTraceColor = getStableColor(newTraceId);
+
+ set({
+ edges: [
+ ...get().edges,
+ ...newEdges,
+ {
+ id: `edge-${connection.source}-${connection.target}-${Date.now()}`,
+ source: connection.source!,
+ target: connection.target!,
+ sourceHandle: `trace-${newTraceId}`,
+ targetHandle: connection.targetHandle,
+ style: { stroke: newTraceColor, strokeWidth: 2 }
+ } as Edge
+ ],
+ });
+
+ setTimeout(() => get().propagateTraces(), 0);
+ return;
+ } else {
+ const newForkTrace = createSimpleForkTrace();
+
+ set({
+ edges: addEdge({
+ ...connection,
+ sourceHandle: `trace-${newForkTrace.id}`,
+ style: { stroke: newForkTrace.color, strokeWidth: 2 }
+ }, get().edges),
+ });
+
+ setTimeout(() => get().propagateTraces(), 0);
+ return;
+ }
}
- // Normal connection
+ // Normal connection - no existing edge from this handle
set({
edges: addEdge({
...connection,
@@ -173,7 +918,9 @@ const useFlowStore = create<FlowState>((set, get) => ({
}),
}));
- if (data.response !== undefined || data.userPrompt !== undefined) {
+ // Only propagate traces when response changes (affects downstream context)
+ // Do NOT propagate on userPrompt changes to avoid resetting activeTraceIds during typing
+ if (data.response !== undefined) {
get().propagateTraces();
}
},
@@ -186,16 +933,52 @@ const useFlowStore = create<FlowState>((set, get) => ({
const node = get().nodes.find(n => n.id === nodeId);
if (!node) return [];
- // The traces stored in node.data.traces are the INCOMING traces.
- // If we select one, we want its history.
+ const activeIds = node.data.activeTraceIds || [];
+ if (activeIds.length === 0) return [];
- const activeTraces = node.data.traces.filter(t =>
- node.data.activeTraceIds?.includes(t.id)
- );
+ // Collect all traces by ID to avoid duplicates
+ const tracesById = new Map<string, Trace>();
+
+ // Add incoming traces
+ (node.data.traces || []).forEach((t: Trace) => {
+ if (activeIds.includes(t.id)) {
+ tracesById.set(t.id, t);
+ }
+ });
+ // Add outgoing traces (only if not already in incoming)
+ (node.data.outgoingTraces || []).forEach((t: Trace) => {
+ if (activeIds.includes(t.id) && !tracesById.has(t.id)) {
+ tracesById.set(t.id, t);
+ }
+ });
+
+ // Collect messages from selected traces
const contextMessages: Message[] = [];
- activeTraces.forEach(t => {
- contextMessages.push(...t.messages);
+ const nodePrefix = `${nodeId}-`;
+
+ tracesById.forEach((t: Trace) => {
+ // For traces originated by this node, filter out this node's own messages
+ const isOriginated = t.id === `trace-${nodeId}` ||
+ t.id.startsWith('fork-') ||
+ (t.id.startsWith('prepend-') && t.id.includes(`-from-${nodeId}`));
+
+ if (isOriginated) {
+ // Only include prepended upstream messages
+ const prependedMessages = t.messages.filter(m => !m.id?.startsWith(nodePrefix));
+ contextMessages.push(...prependedMessages);
+ } else {
+ // Include all messages for incoming traces
+ contextMessages.push(...t.messages);
+ }
+ });
+
+ // Check merged traces
+ const activeMerged = (node.data.mergedTraces || []).filter((m: MergedTrace) =>
+ activeIds.includes(m.id)
+ );
+ activeMerged.forEach((m: MergedTrace) => {
+ contextMessages.push(...m.messages);
});
return contextMessages;
@@ -274,6 +1057,891 @@ const useFlowStore = create<FlowState>((set, get) => ({
get().propagateTraces();
},
+ // Delete an entire trace chain: removes the clicked edge plus all edges that
+ // carry this trace's evolution downstream and its ancestry upstream, then
+ // prunes any nodes left with no connections at all.
+ deleteTrace: (startEdgeId: string) => {
+ const { edges, nodes } = get();
+ // Delete edges along the trace AND orphaned nodes (nodes with no remaining connections)
+ const edgesToDelete = new Set<string>();
+ const nodesInTrace = new Set<string>();
+
+ // Helper to traverse downstream EDGES based on Trace Dependency
+ const traverse = (currentEdge: Edge) => {
+ if (edgesToDelete.has(currentEdge.id)) return;
+ edgesToDelete.add(currentEdge.id);
+
+ const targetNodeId = currentEdge.target;
+ nodesInTrace.add(targetNodeId);
+
+ // Identify the trace ID carried by this edge
+ // NOTE(review): replace('trace-', '') on a handle without that prefix returns
+ // the handle unchanged — assumes trace edges always use 'trace-*' sourceHandles; confirm.
+ const traceId = currentEdge.sourceHandle?.replace('trace-', '');
+ if (!traceId) return;
+
+ // Look for outgoing edges from the target node that carry the EVOLUTION of this trace.
+ // Trace IDs evolve by appending '_<nodeId>' at each hop (see traverseBackward below).
+ const expectedNextTraceId = `${traceId}_${targetNodeId}`;
+
+ const outgoing = edges.filter(e => e.source === targetNodeId);
+ outgoing.forEach(nextEdge => {
+ if (nextEdge.sourceHandle === `trace-${expectedNextTraceId}`) {
+ traverse(nextEdge);
+ }
+ });
+ };
+
+ // Also traverse backwards to find upstream nodes
+ const traverseBackward = (currentEdge: Edge) => {
+ if (edgesToDelete.has(currentEdge.id)) return;
+ edgesToDelete.add(currentEdge.id);
+
+ const sourceNodeId = currentEdge.source;
+ nodesInTrace.add(sourceNodeId);
+
+ // Find the incoming edge to the source node that is part of the same trace
+ const traceId = currentEdge.sourceHandle?.replace('trace-', '');
+ if (!traceId) return;
+
+ // Find the parent trace ID by removing the last _nodeId suffix
+ const lastUnderscore = traceId.lastIndexOf('_');
+ if (lastUnderscore > 0) {
+ const parentTraceId = traceId.substring(0, lastUnderscore);
+ const incoming = edges.filter(e => e.target === sourceNodeId);
+ incoming.forEach(prevEdge => {
+ if (prevEdge.sourceHandle === `trace-${parentTraceId}`) {
+ traverseBackward(prevEdge);
+ }
+ });
+ }
+ };
+
+ const startEdge = edges.find(e => e.id === startEdgeId);
+ if (startEdge) {
+ // Traverse forward
+ traverse(startEdge);
+ // Traverse backward
+ traverseBackward(startEdge);
+ }
+
+ // Filter remaining edges after deletion
+ const remainingEdges = edges.filter(e => !edgesToDelete.has(e.id));
+
+ // Find nodes that become orphaned (no connections at all after edge deletion)
+ const nodesToDelete = new Set<string>();
+ nodesInTrace.forEach(nodeId => {
+ const hasRemainingEdges = remainingEdges.some(
+ e => e.source === nodeId || e.target === nodeId
+ );
+ if (!hasRemainingEdges) {
+ nodesToDelete.add(nodeId);
+ }
+ });
+
+ set({
+ nodes: nodes.filter(n => !nodesToDelete.has(n.id)),
+ edges: remainingEdges
+ });
+
+ // Recompute trace flow now that the graph topology changed.
+ get().propagateTraces();
+ },
+
+ // Flip a node's disabled flag; a disabled node also becomes non-draggable and
+ // non-selectable, and connected edges are re-styled to reflect the new state.
+ toggleNodeDisabled: (nodeId: string) => {
+ const node = get().nodes.find(n => n.id === nodeId);
+ if (node) {
+ const newDisabled = !node.data.disabled;
+ // Update node data AND draggable property
+ set(state => ({
+ nodes: state.nodes.map(n => {
+ if (n.id === nodeId) {
+ return {
+ ...n,
+ draggable: !newDisabled, // Disable dragging when node is disabled
+ selectable: !newDisabled, // Disable selection when node is disabled
+ data: { ...n.data, disabled: newDisabled }
+ };
+ }
+ return n;
+ })
+ }));
+ // Update edge styles to reflect disabled state
+ // (deferred so the node update above has been applied to the store first)
+ setTimeout(() => get().updateEdgeStyles(), 0);
+ }
+ },
+
+ // Snapshot a node's configuration (prompts, model, settings) into the archive
+ // list and persist the archive asynchronously. The live node is not removed.
+ archiveNode: (nodeId: string) => {
+ const node = get().nodes.find(n => n.id === nodeId);
+ if (!node) return;
+
+ const archived: ArchivedNode = {
+ // NOTE(review): Date.now()-based id can collide if two archives happen in
+ // the same millisecond — confirm acceptable.
+ id: `archive_${Date.now()}`,
+ label: node.data.label,
+ model: node.data.model,
+ systemPrompt: node.data.systemPrompt,
+ temperature: node.data.temperature,
+ reasoningEffort: node.data.reasoningEffort || 'medium',
+ userPrompt: node.data.userPrompt,
+ response: node.data.response,
+ enableGoogleSearch: node.data.enableGoogleSearch,
+ mergeStrategy: node.data.mergeStrategy,
+ attachedFileIds: node.data.attachedFileIds || []
+ };
+
+ set(state => ({
+ archivedNodes: [...state.archivedNodes, archived]
+ }));
+ // Fire-and-forget persistence; failures are intentionally swallowed.
+ setTimeout(() => get().saveArchivedNodes().catch(() => {}), 0);
+ },
+
+ // Remove one archived node by id and persist the updated archive (best-effort).
+ removeFromArchive: (archiveId: string) => {
+ set(state => ({
+ archivedNodes: state.archivedNodes.filter(a => a.id !== archiveId)
+ }));
+ setTimeout(() => get().saveArchivedNodes().catch(() => {}), 0);
+ },
+
+ // Instantiate a fresh, unconnected LLM node at the given canvas position from
+ // an archived snapshot. Trace state starts empty; the archived response (if any)
+ // is restored as the node's response.
+ createNodeFromArchive: (archiveId: string, position: { x: number; y: number }) => {
+ const archived = get().archivedNodes.find(a => a.id === archiveId);
+ if (!archived) return;
+
+ const newNode: LLMNode = {
+ id: `node_${Date.now()}`,
+ type: 'llmNode',
+ position,
+ data: {
+ label: archived.label,
+ model: archived.model,
+ temperature: archived.temperature,
+ systemPrompt: archived.systemPrompt,
+ userPrompt: archived.userPrompt || '',
+ reasoningEffort: archived.reasoningEffort,
+ enableGoogleSearch: archived.enableGoogleSearch,
+ mergeStrategy: archived.mergeStrategy || 'smart',
+ traces: [],
+ outgoingTraces: [],
+ forkedTraces: [],
+ mergedTraces: [],
+ activeTraceIds: [],
+ attachedFileIds: archived.attachedFileIds || [],
+ response: archived.response || '',
+ status: 'idle',
+ inputs: 1
+ }
+ };
+
+ get().addNode(newNode);
+ },
+
+ // Toggle the disabled state of every node reachable from the clicked edge.
+ // If ANY node in the reachable set is disabled, all are enabled; otherwise all
+ // are disabled.
+ // NOTE(review): the traversal follows every edge regardless of trace identity,
+ // so it covers the whole weakly-connected component, not just the clicked
+ // trace — confirm this is the intended scope.
+ toggleTraceDisabled: (edgeId: string) => {
+ const { edges, nodes } = get();
+ const edge = edges.find(e => e.id === edgeId);
+ if (!edge) return;
+
+ // Find all nodes connected through this trace (BIDIRECTIONAL)
+ const nodesInTrace = new Set<string>();
+ const visitedEdges = new Set<string>();
+
+ // Traverse downstream (source -> target direction)
+ const traverseDownstream = (currentNodeId: string) => {
+ nodesInTrace.add(currentNodeId);
+
+ const outgoing = edges.filter(e => e.source === currentNodeId);
+ outgoing.forEach(nextEdge => {
+ if (visitedEdges.has(nextEdge.id)) return;
+ visitedEdges.add(nextEdge.id);
+ traverseDownstream(nextEdge.target);
+ });
+ };
+
+ // Traverse upstream (target -> source direction)
+ const traverseUpstream = (currentNodeId: string) => {
+ nodesInTrace.add(currentNodeId);
+
+ const incoming = edges.filter(e => e.target === currentNodeId);
+ incoming.forEach(prevEdge => {
+ if (visitedEdges.has(prevEdge.id)) return;
+ visitedEdges.add(prevEdge.id);
+ traverseUpstream(prevEdge.source);
+ });
+ };
+
+ // Start bidirectional traversal from clicked edge
+ visitedEdges.add(edge.id);
+
+ // Go upstream from source (including source itself)
+ traverseUpstream(edge.source);
+
+ // Go downstream from target (including target itself)
+ traverseDownstream(edge.target);
+
+ // Check if any node in this trace is disabled
+ const anyDisabled = Array.from(nodesInTrace).some(
+ nodeId => nodes.find(n => n.id === nodeId)?.data.disabled
+ );
+
+ // Toggle: if any disabled -> enable all, else disable all
+ const newDisabledState = !anyDisabled;
+
+ set(state => ({
+ nodes: state.nodes.map(node => {
+ if (nodesInTrace.has(node.id)) {
+ return {
+ ...node,
+ draggable: !newDisabledState,
+ selectable: !newDisabledState,
+ data: { ...node.data, disabled: newDisabledState }
+ };
+ }
+ return node;
+ })
+ }));
+
+ // Update edge styles
+ get().updateEdgeStyles();
+ },
+
+ // Re-derive visual edge styles from node disabled state: an edge touching a
+ // disabled endpoint is rendered faded (0.3 opacity) and dashed.
+ updateEdgeStyles: () => {
+ const { nodes, edges } = get();
+
+ const updatedEdges = edges.map(edge => {
+ const sourceNode = nodes.find(n => n.id === edge.source);
+ const targetNode = nodes.find(n => n.id === edge.target);
+
+ // Disabled if EITHER endpoint is disabled.
+ const isDisabled = sourceNode?.data.disabled || targetNode?.data.disabled;
+
+ return {
+ ...edge,
+ style: {
+ ...edge.style,
+ opacity: isDisabled ? 0.3 : 1,
+ strokeDasharray: isDisabled ? '5,5' : undefined
+ }
+ };
+ });
+
+ set({ edges: updatedEdges });
+ },
+
+ // Check if all nodes in trace path have complete Q&A
+ // Returns true only when every node contributing messages to the trace has
+ // BOTH a user and an assistant message (i.e. a finished Q-A pair).
+ isTraceComplete: (trace: Trace) => {
+ // A trace is complete if all nodes in the path have complete Q&A pairs
+ const messages = trace.messages;
+
+ if (messages.length === 0) {
+ // Empty trace - check if the source node has content
+ const { nodes } = get();
+ const sourceNode = nodes.find(n => n.id === trace.sourceNodeId);
+ if (sourceNode) {
+ return !!sourceNode.data.userPrompt && !!sourceNode.data.response;
+ }
+ return true; // No source node, assume complete
+ }
+
+ // Extract node IDs from message IDs (format: nodeId-user or nodeId-assistant)
+ // Group messages by node and check each node has both user and assistant
+ const nodeMessages = new Map<string, { hasUser: boolean; hasAssistant: boolean }>();
+
+ for (const msg of messages) {
+ if (!msg.id) continue;
+
+ // Parse nodeId from message ID (format: nodeId-user or nodeId-assistant)
+ const parts = msg.id.split('-');
+ if (parts.length < 2) continue;
+
+ // The nodeId is everything except the last part (user/assistant)
+ // (this tolerates node IDs that themselves contain '-')
+ const lastPart = parts[parts.length - 1];
+ const nodeId = lastPart === 'user' || lastPart === 'assistant'
+ ? parts.slice(0, -1).join('-')
+ : msg.id; // Fallback: use whole ID if format doesn't match
+
+ if (!nodeMessages.has(nodeId)) {
+ nodeMessages.set(nodeId, { hasUser: false, hasAssistant: false });
+ }
+
+ const nodeData = nodeMessages.get(nodeId)!;
+ if (msg.role === 'user') nodeData.hasUser = true;
+ if (msg.role === 'assistant') nodeData.hasAssistant = true;
+ }
+
+ // Check that ALL nodes in the trace have both user and assistant messages
+ for (const [_nodeId, data] of nodeMessages) {
+ if (!data.hasUser || !data.hasAssistant) {
+ return false; // This node is incomplete
+ }
+ }
+
+ // Must have at least one complete node
+ return nodeMessages.size > 0;
+ },
+
+ // Get all node IDs in trace path
+ // Splits the trace id ("trace-<origin>_<hop>_<hop>...") on '_' and keeps parts
+ // that look like node ids or match an existing node.
+ // NOTE(review): assumes node ids never contain '_' themselves — verify against
+ // the `node_${Date.now()}` id scheme used elsewhere in this store.
+ getTraceNodeIds: (trace: Trace) => {
+ const traceId = trace.id;
+ const parts = traceId.replace('trace-', '').split('_');
+ return parts.filter(p => p.startsWith('node') || get().nodes.some(n => n.id === p));
+ },
+
+ // Create a new node for quick chat, with proper connection
+ // If the source node has no response yet it is overwritten in place; otherwise
+ // a new node is placed to the right (collision-avoided) and connected via the
+ // 'new-trace' handle, which forks a new trace. Returns the id of the node that
+ // received the chat content ('' if the source node was not found).
+ createQuickChatNode: (
+ fromNodeId: string,
+ _trace: Trace | null,
+ userPrompt: string,
+ response: string,
+ model: string,
+ config: Partial<NodeData>
+ ) => {
+ const { nodes, addNode, updateNodeData } = get();
+ const fromNode = nodes.find(n => n.id === fromNodeId);
+
+ if (!fromNode) return '';
+
+ // Check if current node is empty (no response) -> overwrite it
+ const isCurrentNodeEmpty = !fromNode.data.response;
+
+ if (isCurrentNodeEmpty) {
+ // Overwrite current node
+ updateNodeData(fromNodeId, {
+ userPrompt,
+ response,
+ model,
+ status: 'success',
+ querySentAt: Date.now(),
+ responseReceivedAt: Date.now(),
+ ...config
+ });
+ return fromNodeId;
+ }
+
+ // Create new node to the right
+ // Use findNonOverlappingPosition to avoid collision, starting from the ideal position
+ const idealX = fromNode.position.x + 300;
+ const idealY = fromNode.position.y;
+
+ // Check if ideal position overlaps
+ const { findNonOverlappingPosition } = get();
+ const newPos = findNonOverlappingPosition(idealX, idealY);
+
+ const newNodeId = `node_${Date.now()}`;
+ const newNode: LLMNode = {
+ id: newNodeId,
+ type: 'llmNode',
+ position: newPos,
+ data: {
+ label: 'Quick Chat',
+ model,
+ temperature: config.temperature || 0.7,
+ systemPrompt: '',
+ userPrompt,
+ mergeStrategy: 'smart',
+ reasoningEffort: config.reasoningEffort || 'medium',
+ enableGoogleSearch: config.enableGoogleSearch,
+ traces: [],
+ outgoingTraces: [],
+ forkedTraces: [],
+ mergedTraces: [],
+ activeTraceIds: [],
+ attachedFileIds: [],
+ response,
+ status: 'success',
+ inputs: 1,
+ querySentAt: Date.now(),
+ responseReceivedAt: Date.now(),
+ // config spread last so callers can override any default above
+ ...config
+ }
+ };
+
+ addNode(newNode);
+
+ // Connect from the source node using new-trace handle (creates a fork)
+ // Deferred so React Flow has registered the new node before the edge is added.
+ setTimeout(() => {
+ const store = useFlowStore.getState();
+ store.onConnect({
+ source: fromNodeId,
+ sourceHandle: 'new-trace',
+ target: newNodeId,
+ targetHandle: 'input-0'
+ });
+ }, 50);
+
+ return newNodeId;
+ },
+
+ // -------- Blueprint serialization / persistence --------
+ // Record (or clear, when undefined) the path of the blueprint currently open.
+ setCurrentBlueprintPath: (path?: string) => {
+ set({ currentBlueprintPath: path });
+ },
+
+ // Snapshot the current graph (nodes, edges, viewport, theme) into a versioned
+ // BlueprintDocument. Falls back to the last known viewport when none is passed.
+ serializeBlueprint: (viewport?: ViewportState): BlueprintDocument => {
+ return {
+ version: 1,
+ nodes: get().nodes,
+ edges: get().edges,
+ viewport: viewport || get().lastViewport,
+ theme: get().theme,
+ };
+ },
+
+ // Replace the current graph with a loaded blueprint document, clear selection,
+ // then recompute traces. Returns the document's viewport (if any) so the
+ // caller can restore the camera.
+ loadBlueprint: (doc: BlueprintDocument): ViewportState | undefined => {
+ set({
+ nodes: (doc.nodes || []) as LLMNode[],
+ edges: (doc.edges || []) as Edge[],
+ theme: doc.theme || get().theme,
+ selectedNodeId: null,
+ lastViewport: doc.viewport || get().lastViewport,
+ });
+ // Recompute traces after loading (deferred until the set() above is applied)
+ setTimeout(() => get().propagateTraces(), 0);
+ return doc.viewport;
+ },
+
+ // Serialize the current graph and POST it to the backend at the given path,
+ // then remember that path as current and refresh the project tree.
+ // Throws (via jsonFetch) on HTTP failure.
+ saveBlueprintFile: async (path: string, viewport?: ViewportState) => {
+ const payload = get().serializeBlueprint(viewport);
+ await jsonFetch(`${API_BASE}/api/projects/save_blueprint`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ user: getCurrentUser(),
+ path,
+ content: payload,
+ }),
+ });
+ set({ currentBlueprintPath: path, lastViewport: payload.viewport });
+ await get().refreshProjectTree();
+ },
+
+ // Fetch a blueprint file from the backend and validate its structure before
+ // returning it to the caller.
+ readBlueprintFile: async (path: string): Promise<BlueprintDocument> => {
+ const res = await jsonFetch<{ content: BlueprintDocument }>(
+ `${API_BASE}/api/projects/file?user=${encodeURIComponent(getCurrentUser())}&path=${encodeURIComponent(path)}`
+ );
+ return validateBlueprint(res.content);
+ },
+
+ // Reload the user's project file tree from the backend into the store and
+ // return it.
+ refreshProjectTree: async () => {
+ const tree = await jsonFetch<FSItem[]>(
+ `${API_BASE}/api/projects/tree?user=${encodeURIComponent(getCurrentUser())}`
+ );
+ set({ projectTree: tree });
+ return tree;
+ },
+
+ // Create a folder at the given project-relative path, then refresh the tree.
+ createProjectFolder: async (path: string) => {
+ await jsonFetch(`${API_BASE}/api/projects/create_folder`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({ user: getCurrentUser(), path }),
+ });
+ await get().refreshProjectTree();
+ },
+
+ // Rename (newName) or move (newPath) a project item; at least one of the two
+ // must be supplied. Refreshes the tree afterwards.
+ // @throws Error when neither newName nor newPath is given.
+ renameProjectItem: async (path: string, newName?: string, newPath?: string) => {
+ if (!newName && !newPath) {
+ throw new Error('newName or newPath is required');
+ }
+ await jsonFetch(`${API_BASE}/api/projects/rename`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({ user: getCurrentUser(), path, new_name: newName, new_path: newPath }),
+ });
+ await get().refreshProjectTree();
+ },
+
+ // Delete a file (or folder when isFolder=true) from the project, then refresh
+ // the tree.
+ deleteProjectItem: async (path: string, isFolder = false) => {
+ await jsonFetch(`${API_BASE}/api/projects/delete`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({ user: getCurrentUser(), path, is_folder: isFolder }),
+ });
+ await get().refreshProjectTree();
+ },
+
+ // Save the open blueprint, tracking saveStatus through the attempt
+ // ('saving' -> 'saved' | 'error'). Uses the explicit path when given,
+ // otherwise the currently-tracked path.
+ // @throws Error when no target path is known, or rethrows the save failure.
+ saveCurrentBlueprint: async (path?: string, viewport?: ViewportState) => {
+ const targetPath = path || get().currentBlueprintPath;
+ if (!targetPath) {
+ throw new Error('No blueprint path. Please provide a file name.');
+ }
+ set({ saveStatus: 'saving' });
+ try {
+ await get().saveBlueprintFile(targetPath, viewport);
+ set({ saveStatus: 'saved', currentBlueprintPath: targetPath });
+ } catch (e) {
+ console.error(e);
+ set({ saveStatus: 'error' });
+ throw e;
+ }
+ },
+
+ // Reset the canvas to an empty, untitled blueprint (does not touch the backend).
+ clearBlueprint: () => {
+ set({
+ nodes: [],
+ edges: [],
+ selectedNodeId: null,
+ currentBlueprintPath: undefined,
+ lastViewport: undefined,
+ saveStatus: 'idle',
+ });
+ },
+
+ // Load the user's archived-node snapshots from the backend into the store.
+ loadArchivedNodes: async () => {
+ const res = await jsonFetch<{ archived: ArchivedNode[] }>(
+ `${API_BASE}/api/projects/archived?user=${encodeURIComponent(getCurrentUser())}`
+ );
+ set({ archivedNodes: res.archived || [] });
+ },
+
+ // Persist the full archived-node list for the current user to the backend.
+ saveArchivedNodes: async () => {
+ const payload = { user: getCurrentUser(), archived: get().archivedNodes };
+ await jsonFetch(`${API_BASE}/api/projects/archived`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify(payload),
+ });
+ },
+
+ // Files management
+ // Reload the user's uploaded-file metadata list from the backend.
+ refreshFiles: async () => {
+ const res = await jsonFetch<{ files: FileMeta[] }>(
+ `${API_BASE}/api/files?user=${encodeURIComponent(getCurrentUser())}`
+ );
+ set({ files: res.files || [] });
+ },
+
+ // Upload a file via multipart form data, tracking an in-flight temp id in
+ // uploadingFileIds so the UI can show progress. Refreshes the file list on
+ // success and returns the created FileMeta.
+ // NOTE(review): unlike addFileScope/removeFileScope below, this request sends
+ // no getAuthHeaders() — confirm the upload endpoint is intentionally unauthenticated.
+ // @throws Error with the response body text on HTTP failure, or when the
+ // server reports success without file info.
+ uploadFile: async (file: File, options?: UploadFileOptions) => {
+ const provider = options?.provider ?? 'local';
+ const purpose = options?.purpose;
+ const tempId = `${file.name}-${Date.now()}`;
+ const prev = get().uploadingFileIds || [];
+ set({ uploadingFileIds: [...prev, tempId] });
+ const form = new FormData();
+ form.append('file', file);
+ form.append('provider', provider);
+ if (purpose) {
+ form.append('purpose', purpose);
+ }
+ try {
+ const res = await fetch(`${API_BASE}/api/files/upload?user=${encodeURIComponent(getCurrentUser())}`, {
+ method: 'POST',
+ body: form,
+ });
+ if (!res.ok) {
+ throw new Error(await res.text());
+ }
+ const data = await res.json();
+ if (!data.file) {
+ throw new Error('Upload succeeded but no file info returned');
+ }
+ await get().refreshFiles();
+ return data.file as FileMeta;
+ } finally {
+ // Always clear the in-flight marker, whether the upload succeeded or not.
+ set({ uploadingFileIds: (get().uploadingFileIds || []).filter(id => id !== tempId) });
+ }
+ },
+
+ // Delete an uploaded file by id, then refresh the file list.
+ // @throws Error with the response body text on HTTP failure.
+ deleteFile: async (fileId: string) => {
+ const res = await fetch(`${API_BASE}/api/files/delete?user=${encodeURIComponent(getCurrentUser())}&file_id=${encodeURIComponent(fileId)}`, {
+ method: 'POST',
+ });
+ if (!res.ok) {
+ throw new Error(await res.text());
+ }
+ await get().refreshFiles();
+ },
+
+ // Attach a scope label to an uploaded file (authenticated request), then
+ // refresh the file list.
+ // @throws Error with the response body text on HTTP failure.
+ addFileScope: async (fileId: string, scope: string) => {
+ const res = await fetch(`${API_BASE}/api/files/add_scope`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json', ...getAuthHeaders() },
+ body: JSON.stringify({ user: getCurrentUser(), file_id: fileId, scope }),
+ });
+ if (!res.ok) {
+ throw new Error(await res.text());
+ }
+ await get().refreshFiles();
+ },
+
+ // Detach a scope label from an uploaded file (authenticated request), then
+ // refresh the file list.
+ // @throws Error with the response body text on HTTP failure.
+ removeFileScope: async (fileId: string, scope: string) => {
+ const res = await fetch(`${API_BASE}/api/files/remove_scope`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json', ...getAuthHeaders() },
+ body: JSON.stringify({ user: getCurrentUser(), file_id: fileId, scope }),
+ });
+ if (!res.ok) {
+ throw new Error(await res.text());
+ }
+ await get().refreshFiles();
+ },
+
+ // --------------------------------------------------------
+
+ // Compute merged messages based on strategy
+ // Optional tracesOverride parameter to use latest traces during propagation
+ // Combines the messages of the given source traces according to the chosen
+ // MergeStrategy. Each emitted message is tagged with its source trace id and
+ // color so the UI can attribute it. Returns [] when the node or all source
+ // traces are missing.
+ computeMergedMessages: (nodeId: string, sourceTraceIds: string[], strategy: MergeStrategy, tracesOverride?: Trace[]): Message[] => {
+ const { nodes } = get();
+ const node = nodes.find(n => n.id === nodeId);
+ if (!node) return [];
+
+ // Use override traces if provided (for propagation), otherwise use node's stored traces
+ const availableTraces = tracesOverride || node.data.traces || [];
+
+ // Get the source traces
+ const sourceTraces = sourceTraceIds
+ .map(id => availableTraces.find((t: Trace) => t.id === id))
+ .filter((t): t is Trace => t !== undefined);
+
+ if (sourceTraces.length === 0) return [];
+
+ // Helper to add source trace info to a message
+ const tagMessage = (msg: Message, trace: Trace): Message => ({
+ ...msg,
+ sourceTraceId: trace.id,
+ sourceTraceColor: trace.color
+ });
+
+ // Helper to get timestamp for a message based on node
+ const getMessageTimestamp = (msg: Message, type: 'query' | 'response'): number => {
+ // Extract node ID from message ID (format: nodeId-user or nodeId-assistant)
+ // NOTE(review): split('-')[0] keeps only the text before the FIRST '-',
+ // unlike isTraceComplete which strips only the trailing role part — node ids
+ // containing '-' would resolve wrongly here. Confirm node ids never contain '-'.
+ const msgNodeId = msg.id?.split('-')[0];
+ if (!msgNodeId) return 0;
+
+ const msgNode = nodes.find(n => n.id === msgNodeId);
+ if (!msgNode) return 0;
+
+ if (type === 'query') {
+ return msgNode.data.querySentAt || 0;
+ } else {
+ return msgNode.data.responseReceivedAt || 0;
+ }
+ };
+
+ // Helper to get all messages with their timestamps and trace info
+ const getAllMessagesWithTime = () => {
+ const allMessages: { msg: Message; queryTime: number; responseTime: number; trace: Trace }[] = [];
+
+ sourceTraces.forEach((trace) => {
+ trace.messages.forEach(msg => {
+ allMessages.push({
+ msg: tagMessage(msg, trace),
+ queryTime: getMessageTimestamp(msg, 'query'),
+ responseTime: getMessageTimestamp(msg, 'response'),
+ trace
+ });
+ });
+ });
+
+ return allMessages;
+ };
+
+ switch (strategy) {
+ case 'query_time': {
+ // Sort by query time, keeping Q-A pairs together
+ // (i += 2 pairing assumes messages strictly alternate user/assistant)
+ const pairs: { user: Message | null; assistant: Message | null; time: number; trace: Trace }[] = [];
+
+ sourceTraces.forEach(trace => {
+ for (let i = 0; i < trace.messages.length; i += 2) {
+ const user = trace.messages[i];
+ const assistant = trace.messages[i + 1] || null;
+ const time = getMessageTimestamp(user, 'query');
+ pairs.push({
+ user: user ? tagMessage(user, trace) : null,
+ assistant: assistant ? tagMessage(assistant, trace) : null,
+ time,
+ trace
+ });
+ }
+ });
+
+ pairs.sort((a, b) => a.time - b.time);
+
+ const result: Message[] = [];
+ pairs.forEach(pair => {
+ if (pair.user) result.push(pair.user);
+ if (pair.assistant) result.push(pair.assistant);
+ });
+ return result;
+ }
+
+ case 'response_time': {
+ // Sort by response time, keeping Q-A pairs together
+ // (same i += 2 pairing assumption as 'query_time')
+ const pairs: { user: Message | null; assistant: Message | null; time: number; trace: Trace }[] = [];
+
+ sourceTraces.forEach(trace => {
+ for (let i = 0; i < trace.messages.length; i += 2) {
+ const user = trace.messages[i];
+ const assistant = trace.messages[i + 1] || null;
+ const time = getMessageTimestamp(assistant || user, 'response');
+ pairs.push({
+ user: user ? tagMessage(user, trace) : null,
+ assistant: assistant ? tagMessage(assistant, trace) : null,
+ time,
+ trace
+ });
+ }
+ });
+
+ pairs.sort((a, b) => a.time - b.time);
+
+ const result: Message[] = [];
+ pairs.forEach(pair => {
+ if (pair.user) result.push(pair.user);
+ if (pair.assistant) result.push(pair.assistant);
+ });
+ return result;
+ }
+
+ case 'trace_order': {
+ // Simply concatenate in the order of sourceTraceIds
+ const result: Message[] = [];
+ sourceTraces.forEach(trace => {
+ trace.messages.forEach(msg => {
+ result.push(tagMessage(msg, trace));
+ });
+ });
+ return result;
+ }
+
+ case 'grouped': {
+ // Same as trace_order but more explicitly grouped
+ const result: Message[] = [];
+ sourceTraces.forEach(trace => {
+ trace.messages.forEach(msg => {
+ result.push(tagMessage(msg, trace));
+ });
+ });
+ return result;
+ }
+
+ case 'interleaved': {
+ // True interleaving - sort ALL messages by their actual time
+ const allMessages = getAllMessagesWithTime();
+
+ // Sort by the earlier of query/response time for each message
+ allMessages.sort((a, b) => {
+ const aTime = a.msg.role === 'user' ? a.queryTime : a.responseTime;
+ const bTime = b.msg.role === 'user' ? b.queryTime : b.responseTime;
+ return aTime - bTime;
+ });
+
+ return allMessages.map(m => m.msg);
+ }
+
+ case 'summary': {
+ // For summary, we return all messages in trace order with source tags
+ // The actual summarization is done asynchronously and stored in summarizedContent
+ const result: Message[] = [];
+ sourceTraces.forEach(trace => {
+ trace.messages.forEach(msg => {
+ result.push(tagMessage(msg, trace));
+ });
+ });
+ return result;
+ }
+
+ default:
+ return [];
+ }
+ },
+
+ // Create a new merged trace
+ // Builds a MergedTrace from the given source traces and strategy, stores it on
+ // the node, and schedules trace propagation. Returns the new merged trace id
+ // ('' when the node is not found).
+ createMergedTrace: (nodeId: string, sourceTraceIds: string[], strategy: MergeStrategy): string => {
+ const { nodes, updateNodeData, computeMergedMessages } = get();
+ const node = nodes.find(n => n.id === nodeId);
+ if (!node) return '';
+
+ // Get colors from source traces (preserve multi-colors for merged parents)
+ const colors = sourceTraceIds.flatMap(id => {
+ const t = node.data.traces.find((tr: Trace) => tr.id === id);
+ if (!t) return [];
+ if (t.mergedColors && t.mergedColors.length > 0) return t.mergedColors;
+ return t.color ? [t.color] : [];
+ });
+
+ // Compute merged messages
+ const messages = computeMergedMessages(nodeId, sourceTraceIds, strategy);
+
+ const mergedTraceId = `merged-${nodeId}-${Date.now()}`;
+
+ const mergedTrace: MergedTrace = {
+ id: mergedTraceId,
+ sourceNodeId: nodeId,
+ sourceTraceIds,
+ strategy,
+ colors,
+ messages
+ };
+
+ const existingMerged = node.data.mergedTraces || [];
+ updateNodeData(nodeId, {
+ mergedTraces: [...existingMerged, mergedTrace]
+ });
+
+ // Trigger trace propagation to update outgoing traces
+ setTimeout(() => {
+ get().propagateTraces();
+ }, 50);
+
+ return mergedTraceId;
+ },
+
+ // Update an existing merged trace
+ // Applies partial updates (source traces, strategy, summary text); colors and
+ // merged messages are recomputed only when their inputs changed. No-op when
+ // the node or merged trace is not found. Schedules propagation afterwards.
+ updateMergedTrace: (nodeId: string, mergedTraceId: string, updates: { sourceTraceIds?: string[]; strategy?: MergeStrategy; summarizedContent?: string }) => {
+ const { nodes, updateNodeData, computeMergedMessages } = get();
+ const node = nodes.find(n => n.id === nodeId);
+ if (!node) return;
+
+ const existingMerged = node.data.mergedTraces || [];
+ const mergedIndex = existingMerged.findIndex((m: MergedTrace) => m.id === mergedTraceId);
+ if (mergedIndex === -1) return;
+
+ const current = existingMerged[mergedIndex];
+ const newSourceTraceIds = updates.sourceTraceIds || current.sourceTraceIds;
+ const newStrategy = updates.strategy || current.strategy;
+
+ // Recompute colors if source traces changed (preserve multi-colors)
+ let newColors = current.colors;
+ if (updates.sourceTraceIds) {
+ newColors = updates.sourceTraceIds.flatMap(id => {
+ const t = node.data.traces.find((tr: Trace) => tr.id === id);
+ if (!t) return [];
+ if (t.mergedColors && t.mergedColors.length > 0) return t.mergedColors;
+ return t.color ? [t.color] : [];
+ });
+ }
+
+ // Recompute messages if source or strategy changed
+ let newMessages = current.messages;
+ if (updates.sourceTraceIds || updates.strategy) {
+ newMessages = computeMergedMessages(nodeId, newSourceTraceIds, newStrategy);
+ }
+
+ const updatedMerged = [...existingMerged];
+ updatedMerged[mergedIndex] = {
+ ...current,
+ sourceTraceIds: newSourceTraceIds,
+ strategy: newStrategy,
+ colors: newColors,
+ messages: newMessages,
+ // Explicit !== undefined check so an empty-string summary still overwrites.
+ summarizedContent: updates.summarizedContent !== undefined ? updates.summarizedContent : current.summarizedContent
+ };
+
+ updateNodeData(nodeId, { mergedTraces: updatedMerged });
+
+ // Trigger trace propagation
+ setTimeout(() => {
+ get().propagateTraces();
+ }, 50);
+ },
+
+ // Delete a merged trace
+ // Removes the merged trace from the node's list and schedules propagation so
+ // downstream edges/traces drop it. No-op when the node is not found.
+ deleteMergedTrace: (nodeId: string, mergedTraceId: string) => {
+ const { nodes, updateNodeData } = get();
+ const node = nodes.find(n => n.id === nodeId);
+ if (!node) return;
+
+ const existingMerged = node.data.mergedTraces || [];
+ const filteredMerged = existingMerged.filter((m: MergedTrace) => m.id !== mergedTraceId);
+
+ updateNodeData(nodeId, { mergedTraces: filteredMerged });
+
+ // Trigger trace propagation
+ setTimeout(() => {
+ get().propagateTraces();
+ }, 50);
+ },
+
propagateTraces: () => {
const { nodes, edges } = get();
@@ -315,47 +1983,107 @@ const useFlowStore = create<FlowState>((set, get) => ({
const nodeOutgoingTraces = new Map<string, Trace[]>();
// Map<NodeID, Trace[]>: Traces ENTERING this node (to update NodeData)
const nodeIncomingTraces = new Map<string, Trace[]>();
+ // Map<NodeID, string[]>: Merged traces to delete from each node (disconnected sources)
+ const nodeMergedTracesToDelete = new Map<string, string[]>();
+ // Map<NodeID, Map<MergedTraceId, UpdatedMergedTrace>>: Updated merged traces with new messages
+ const nodeUpdatedMergedTraces = new Map<string, Map<string, { messages: Message[], colors: string[] }>>();
+ // Map<NodeID, Message[]>: Prepend messages for each node (for recursive prepend chains)
+ const nodePrependMessages = new Map<string, Message[]>();
+ // Map<NodeID, Trace[]>: Forked traces to keep (cleanup unused ones)
+ const nodeForkedTracesToClean = new Map<string, Trace[]>();
// Also track Edge updates (Color AND SourceHandle)
const updatedEdges = [...edges];
- let edgesChanged = false;
// Iterate
sortedNodes.forEach(nodeId => {
const node = nodes.find(n => n.id === nodeId);
if (!node) return;
- // 1. Gather Incoming Traces
+ // 1. Gather Incoming Traces (regular and prepend)
const incomingEdges = edges.filter(e => e.target === nodeId);
const myIncomingTraces: Trace[] = [];
- incomingEdges.forEach(edge => {
+ // Map: traceIdToPrepend -> Message[] (messages to prepend to that trace)
+ const prependMessages = new Map<string, Message[]>();
+
+ incomingEdges.forEach(edge => {
const parentOutgoing = nodeOutgoingTraces.get(edge.source) || [];
+ const targetHandle = edge.targetHandle || '';
+
+ // Check if this is a prepend handle
+ const isPrependHandle = targetHandle.startsWith('prepend-');
+
+ // Special case: prepend connection (from a prepend trace created by onConnect)
+ if (isPrependHandle) {
+ // Find the source trace - it could be:
+ // 1. A prepend trace (prepend-xxx-from-yyy format)
+ // 2. Or any trace from the source handle
+ let matchedPrependTrace = parentOutgoing.find(t => edge.sourceHandle === `trace-${t.id}`);
+
+ // Fallback: if no exact match, find by sourceHandle pattern
+ if (!matchedPrependTrace && edge.sourceHandle?.startsWith('trace-')) {
+ const sourceTraceId = edge.sourceHandle.replace('trace-', '');
+ matchedPrependTrace = parentOutgoing.find(t => t.id === sourceTraceId);
+ }
+
+ // Extract the original target trace ID from the prepend handle
+ // Format is "prepend-{traceId}" where traceId could be "fork-xxx" or "trace-xxx"
+ const targetTraceId = targetHandle.replace('prepend-', '').replace('inherited-', '');
+
+ if (matchedPrependTrace && matchedPrependTrace.messages.length > 0) {
+ // Prepend the messages from the source trace
+ const existing = prependMessages.get(targetTraceId) || [];
+ prependMessages.set(targetTraceId, [...existing, ...matchedPrependTrace.messages]);
+
+ // Update edge color to match the trace
+ const edgeIndex = updatedEdges.findIndex(e => e.id === edge.id);
+ if (edgeIndex !== -1) {
+ const currentEdge = updatedEdges[edgeIndex];
+ if (currentEdge.style?.stroke !== matchedPrependTrace.color) {
+ updatedEdges[edgeIndex] = {
+ ...currentEdge,
+ style: { ...currentEdge.style, stroke: matchedPrependTrace.color, strokeWidth: 2 }
+ };
+ }
+ }
+ } else {
+ // Even if no messages yet, store the prepend trace info for later updates
+ // The prepend connection should still work when the source node gets content
+ const existing = prependMessages.get(targetTraceId) || [];
+ if (!prependMessages.has(targetTraceId)) {
+ prependMessages.set(targetTraceId, existing);
+ }
+ }
+ return; // Don't process prepend edges as regular incoming traces
+ }
// Find match based on Handle ID
- // EXACT match first
- // Since we removed 'new-trace' handle, we only look for exact trace matches.
let matchedTrace = parentOutgoing.find(t => edge.sourceHandle === `trace-${t.id}`);
// If no exact match, try to find a "Semantic Match" (Auto-Reconnect)
- // If edge.sourceHandle was 'trace-X', and now we have 'trace-X_Parent', that's a likely evolution.
if (!matchedTrace && edge.sourceHandle?.startsWith('trace-')) {
const oldId = edge.sourceHandle.replace('trace-', '');
matchedTrace = parentOutgoing.find(t => t.id === `${oldId}_${edge.source}`);
}
// Fallback: If still no match, and parent has traces, try to connect to the most logical one.
- // If parent has only 1 trace, connect to it.
- // This handles cases where edge.sourceHandle might be null or outdated.
if (!matchedTrace && parentOutgoing.length > 0) {
- // If edge has no handle ID, default to the last generated trace (usually Self Trace)
if (!edge.sourceHandle) {
matchedTrace = parentOutgoing[parentOutgoing.length - 1];
}
}
if (matchedTrace) {
- myIncomingTraces.push(matchedTrace);
+ if (isPrependHandle) {
+ // This is a prepend connection from a trace handle - extract the target trace ID
+ const targetTraceId = targetHandle.replace('prepend-', '').replace('inherited-', '');
+ const existing = prependMessages.get(targetTraceId) || [];
+ prependMessages.set(targetTraceId, [...existing, ...matchedTrace.messages]);
+ } else {
+ // Regular incoming trace
+ myIncomingTraces.push(matchedTrace);
+ }
// Update Edge Visuals & Logical Connection
const edgeIndex = updatedEdges.findIndex(e => e.id === edge.id);
@@ -363,15 +2091,53 @@ const useFlowStore = create<FlowState>((set, get) => ({
const currentEdge = updatedEdges[edgeIndex];
const newHandleId = `trace-${matchedTrace.id}`;
- // Check if we need to update
- if (currentEdge.sourceHandle !== newHandleId || currentEdge.style?.stroke !== matchedTrace.color) {
- updatedEdges[edgeIndex] = {
- ...currentEdge,
- sourceHandle: newHandleId, // Auto-update handle connection!
- style: { ...currentEdge.style, stroke: matchedTrace.color, strokeWidth: 2 }
- };
- edgesChanged = true;
- }
+ // Check if this is a merged trace (need gradient)
+ // Use the new properties on Trace object
+ const isMergedTrace = matchedTrace.isMerged || matchedTrace.id.startsWith('merged-');
+ const mergedColors = matchedTrace.mergedColors || [];
+
+ // If colors not on trace, try to find in parent node's mergedTraces (for originator)
+ let finalColors = mergedColors;
+ if (isMergedTrace && finalColors.length === 0) {
+ const parentNode = nodes.find(n => n.id === edge.source);
+ const mergedData = parentNode?.data.mergedTraces?.find((m: MergedTrace) => m.id === matchedTrace.id);
+ if (mergedData) finalColors = mergedData.colors;
+ }
+
+ // Create gradient for merged traces
+ let gradient: string | undefined;
+ if (finalColors.length > 0) {
+ const gradientStops = finalColors.map((color: string, idx: number) =>
+ `${color} ${(idx / finalColors.length) * 100}%, ${color} ${((idx + 1) / finalColors.length) * 100}%`
+ ).join(', ');
+ gradient = `linear-gradient(90deg, ${gradientStops})`;
+ }
+
+ // Check if we need to update
+ // Update if handle changed OR color changed OR merged status/colors changed
+ const currentIsMerged = currentEdge.data?.isMerged;
+ const currentColors = currentEdge.data?.colors;
+ const colorsChanged = JSON.stringify(currentColors) !== JSON.stringify(finalColors);
+
+ if (currentEdge.sourceHandle !== newHandleId ||
+ currentEdge.style?.stroke !== matchedTrace.color ||
+ currentIsMerged !== isMergedTrace ||
+ colorsChanged) {
+
+ updatedEdges[edgeIndex] = {
+ ...currentEdge,
+ sourceHandle: newHandleId,
+ type: isMergedTrace ? 'merged' : undefined,
+ style: { ...currentEdge.style, stroke: matchedTrace.color, strokeWidth: 2 },
+ data: {
+ ...currentEdge.data,
+ gradient,
+ isMerged: isMergedTrace,
+ colors: finalColors
+ }
+ };
+ // Edge was updated
+ }
}
}
});
@@ -402,54 +2168,153 @@ const useFlowStore = create<FlowState>((set, get) => ({
const myOutgoingTraces: Trace[] = [];
- // A. Pass-through traces (append history)
+ // A. Pass-through traces (append history) - only if there's a downstream edge
uniqueIncoming.forEach(t => {
- // When a trace passes through a node and gets modified, it effectively becomes a NEW branch of that trace.
- // We must append the current node ID to the trace ID to distinguish branches.
- // e.g. Trace "root" -> passes Node A -> becomes "root_A"
- // If it passes Node B -> becomes "root_B"
- // Downstream Node D can then distinguish "root_A" from "root_B".
-
- // Match Logic:
- // We need to find if this edge was PREVIOUSLY connected to a trace that has now evolved into 'newTrace'.
- // The edge.sourceHandle might be the OLD ID.
- // We need a heuristic: if edge.sourceHandle contains the ROOT ID of this trace, we assume it's a match.
- // But this is risky if multiple branches exist.
-
- // Better heuristic:
- // When we extend a trace t -> t_new (with id t.id + '_' + node.id),
- // we record this evolution mapping.
-
- const newTraceId = `${t.id}_${node.id}`;
-
- myOutgoingTraces.push({
- ...t,
- id: newTraceId,
- messages: [...t.messages, ...myResponseMsg]
- });
+ // SIMPLIFICATION: Keep the same Trace ID for pass-through traces.
+ // This ensures A-B-C appears as a single continuous trace with the same ID.
+ // Only branch/fork traces get new IDs.
+
+ const passThroughId = t.id;
+
+ // Only create pass-through if there's actually a downstream edge using it
+ const hasDownstreamEdge = updatedEdges.some(e =>
+ e.source === node.id &&
+ (e.sourceHandle === `trace-${passThroughId}`)
+ );
+
+ if (hasDownstreamEdge) {
+ myOutgoingTraces.push({
+ ...t,
+ id: passThroughId,
+ // Keep original sourceNodeId - this is a pass-through, not originated here
+ sourceNodeId: t.sourceNodeId,
+ messages: [...t.messages, ...myResponseMsg]
+ });
+ }
});
- // B. Self Trace (New Branch) -> This is the "Default" self trace (always there?)
- // Actually, if we use Manual Forks, maybe we don't need an automatic self trace?
- // Or maybe the "Default" self trace is just one of the outgoing ones.
- // Let's keep it for compatibility if downstream picks it up automatically.
- const selfTrace: Trace = {
- id: `trace-${node.id}`,
- sourceNodeId: node.id,
- color: getStableColor(node.id),
- messages: [...myResponseMsg]
- };
- myOutgoingTraces.push(selfTrace);
+ // B. Self Trace (New Branch) -> Only create if there's a downstream edge using it
+ const selfTraceId = `trace-${node.id}`;
+ const selfPrepend = prependMessages.get(selfTraceId) || [];
- // C. Manual Forks
+ // Store prepend messages for this node (for recursive prepend chains)
+ // This includes any prepend from upstream and this node's own messages
+ if (selfPrepend.length > 0 || myResponseMsg.length > 0) {
+ nodePrependMessages.set(node.id, [...selfPrepend, ...myResponseMsg]);
+ }
+
+ // Only add self trace to outgoing if there's actually a downstream edge using it
+ // Check if any edge uses this self trace as source
+ // Edge sourceHandle format: "trace-{traceId}" where traceId = "trace-{nodeId}"
+ // So sourceHandle would be "trace-trace-{nodeId}"
+ const hasSelfTraceEdge = updatedEdges.some(e =>
+ e.source === node.id &&
+ (e.sourceHandle === `trace-${selfTraceId}` || e.sourceHandle === selfTraceId)
+ );
+
+ if (hasSelfTraceEdge) {
+ const selfTrace: Trace = {
+ id: selfTraceId,
+ sourceNodeId: node.id,
+ color: getStableColor(node.id),
+ messages: [...selfPrepend, ...myResponseMsg]
+ };
+ myOutgoingTraces.push(selfTrace);
+ }
+
+ // C. Manual Forks - only include forks that have downstream edges
if (node.data.forkedTraces) {
- // We need to keep them updated with the latest messages (if prompt changed)
- // But keep their IDs and Colors stable.
- const updatedForks = node.data.forkedTraces.map(fork => ({
- ...fork,
- messages: [...myResponseMsg] // Re-sync messages
- }));
+ // Filter to only forks that are actually being used (have edges)
+ const activeForks = node.data.forkedTraces.filter(fork => {
+ return updatedEdges.some(e =>
+ e.source === node.id && e.sourceHandle === `trace-${fork.id}`
+ );
+ });
+
+ // Update messages for active forks
+ const updatedForks = activeForks.map(fork => {
+ const forkPrepend = prependMessages.get(fork.id) || [];
+ return {
+ ...fork,
+ messages: [...forkPrepend, ...myResponseMsg] // Prepend + current messages
+ };
+ });
myOutgoingTraces.push(...updatedForks);
+
+ // Clean up unused forks from node data
+ if (activeForks.length !== node.data.forkedTraces.length) {
+ nodeForkedTracesToClean.set(nodeId, activeForks);
+ }
+ }
+
+ // D. Merged Traces - recompute messages when source traces change
+ // Also check if any source traces are disconnected, and mark for deletion
+ const mergedTracesToDelete: string[] = [];
+ const updatedMergedMap = new Map<string, { messages: Message[], colors: string[] }>();
+
+ if (node.data.mergedTraces && node.data.mergedTraces.length > 0) {
+ const { computeMergedMessages } = get();
+
+ node.data.mergedTraces.forEach((merged: MergedTrace) => {
+ // Check if all source traces are still connected
+ const allSourcesConnected = merged.sourceTraceIds.every(id =>
+ uniqueIncoming.some(t => t.id === id)
+ );
+
+ if (!allSourcesConnected) {
+ // Mark this merged trace for deletion
+ mergedTracesToDelete.push(merged.id);
+ return; // Don't add to outgoing traces
+ }
+
+ // Recompute messages based on the current incoming traces (pass uniqueIncoming for latest data)
+ const updatedMessages = computeMergedMessages(node.id, merged.sourceTraceIds, merged.strategy, uniqueIncoming);
+
+ // Filter out current node's messages from updatedMessages to avoid duplication
+ // (since myResponseMsg will be appended at the end)
+ const nodePrefix = `${node.id}-`;
+ const filteredMessages = updatedMessages.filter(m => !m.id?.startsWith(nodePrefix));
+
+ // Get prepend messages for this merged trace
+ const mergedPrepend = prependMessages.get(merged.id) || [];
+
+ // Update colors from current traces (preserve multi-colors)
+ const updatedColors = merged.sourceTraceIds.flatMap(id => {
+ const t = uniqueIncoming.find(trace => trace.id === id);
+ if (!t) return [];
+ if (t.mergedColors && t.mergedColors.length > 0) return t.mergedColors;
+ return t.color ? [t.color] : [];
+ });
+
+ // Combine all messages for this merged trace
+ const mergedMessages = [...mergedPrepend, ...filteredMessages, ...myResponseMsg];
+
+ // Store updated data for bulk update later
+ updatedMergedMap.set(merged.id, { messages: mergedMessages, colors: updatedColors });
+
+ // Create a trace-like object for merged output
+ const mergedOutgoing: Trace = {
+ id: merged.id,
+ sourceNodeId: node.id,
+ color: updatedColors[0] || getStableColor(merged.id),
+ messages: mergedMessages,
+ isMerged: true,
+ mergedColors: updatedColors,
+ sourceTraceIds: merged.sourceTraceIds
+ };
+
+ myOutgoingTraces.push(mergedOutgoing);
+ });
+ }
+
+ // Store merged traces to delete for this node
+ if (mergedTracesToDelete.length > 0) {
+ nodeMergedTracesToDelete.set(nodeId, mergedTracesToDelete);
+ }
+
+ // Store updated merged trace data for this node
+ if (updatedMergedMap.size > 0) {
+ nodeUpdatedMergedTraces.set(nodeId, updatedMergedMap);
}
nodeOutgoingTraces.set(nodeId, myOutgoingTraces);
@@ -461,23 +2326,78 @@ const useFlowStore = create<FlowState>((set, get) => ({
});
// Bulk Update Store
+ const uniqTraces = (list: Trace[]) => Array.from(new Map(list.map(t => [t.id, t])).values());
+ const uniqMerged = (list: MergedTrace[]) => Array.from(new Map(list.map(m => [m.id, m])).values());
+
set(state => ({
edges: updatedEdges,
nodes: state.nodes.map(n => {
- const traces = nodeIncomingTraces.get(n.id) || [];
- const outTraces = nodeOutgoingTraces.get(n.id) || [];
+ const traces = uniqTraces(nodeIncomingTraces.get(n.id) || []);
+ const outTraces = uniqTraces(nodeOutgoingTraces.get(n.id) || []);
+ const mergedToDelete = nodeMergedTracesToDelete.get(n.id) || [];
+ const updatedMerged = nodeUpdatedMergedTraces.get(n.id);
+ const cleanedForks = nodeForkedTracesToClean.get(n.id);
+
+ // Filter out disconnected merged traces and update messages for remaining ones
+ let filteredMergedTraces = uniqMerged((n.data.mergedTraces || []).filter(
+ (m: MergedTrace) => !mergedToDelete.includes(m.id)
+ ));
+
+ // Apply updated messages and colors to merged traces
+ if (updatedMerged && updatedMerged.size > 0) {
+ filteredMergedTraces = filteredMergedTraces.map((m: MergedTrace) => {
+ const updated = updatedMerged.get(m.id);
+ if (updated) {
+ return { ...m, messages: updated.messages, colors: updated.colors };
+ }
+ return m;
+ });
+ }
+
+ // Clean up unused forked traces
+ const filteredForkedTraces = cleanedForks !== undefined
+ ? cleanedForks
+ : (n.data.forkedTraces || []);
+
+ // Update activeTraceIds: remove deleted merged traces and truly orphaned fork traces
+ // Preserve all other valid trace selections
+ const activeForkIds = filteredForkedTraces.map(f => f.id);
+ const incomingTraceIds = traces.map(t => t.id);
+ const outgoingTraceIds = outTraces.map(t => t.id);
+ const originalActiveIds = n.data.activeTraceIds || [];
+
+ const filteredActiveTraceIds = originalActiveIds.filter(
+ (id: string) => {
+ // Remove deleted merged traces
+ if (mergedToDelete.includes(id)) return false;
+ // For fork traces: only remove if truly orphaned
+ // A fork is NOT orphaned if it's in forkedTraces, incoming traces, or outgoing traces
+ if (id.startsWith('fork-')) {
+ const isInForkedTraces = activeForkIds.includes(id);
+ const isIncomingTrace = incomingTraceIds.includes(id);
+ const isOutgoingTrace = outgoingTraceIds.includes(id);
+ if (!isInForkedTraces && !isIncomingTrace && !isOutgoingTrace) return false;
+ }
+ // Keep all other selections
+ return true;
+ }
+ );
+
return {
...n,
data: {
...n.data,
traces,
outgoingTraces: outTraces,
- activeTraceIds: n.data.activeTraceIds
+ forkedTraces: filteredForkedTraces,
+ mergedTraces: filteredMergedTraces,
+ activeTraceIds: filteredActiveTraceIds
}
};
})
}));
- }
-}));
+ },
+ };
+});
export default useFlowStore;