Diffstat (limited to 'frontend/src/store/flowStore.ts')
-rw-r--r--  frontend/src/store/flowStore.ts  298
1 file changed, 245 insertions(+), 53 deletions(-)
diff --git a/frontend/src/store/flowStore.ts b/frontend/src/store/flowStore.ts
index 49a8ece..5ed66e6 100644
--- a/frontend/src/store/flowStore.ts
+++ b/frontend/src/store/flowStore.ts
@@ -30,6 +30,10 @@ export interface Trace {
sourceNodeId: string;
color: string;
messages: Message[];
+ // Optional merged trace info for visual propagation
+ isMerged?: boolean;
+ mergedColors?: string[];
+ sourceTraceIds?: string[];
}
// Merge strategy types
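
For reference, a trace produced by a merge node would now carry the new optional metadata alongside the existing fields. A minimal sketch, with made-up IDs, node names, and colors, assuming the Trace and Message types defined in this file:

// Hypothetical example only; every value here is illustrative.
const mergedExample: Trace = {
  id: 'merged-node-3-1700000000000-ab12cd',
  sourceNodeId: 'node-3',
  color: '#e06666',
  messages: [],
  isMerged: true,                          // marks the trace as a merge product
  mergedColors: ['#e06666', '#6fa8dc'],    // parent colors, used for edge gradients
  sourceTraceIds: ['trace-a', 'trace-b'],  // the parent traces the merge was built from
};
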
@@ -200,12 +204,8 @@ const useFlowStore = create<FlowState>((set, get) => ({
const isOverlapping = (testX: number, testY: number) => {
return nodes.some(node => {
- // Use the same estimated dimensions for existing nodes too
- // Ideally we would know their actual dimensions, but this is a safe approximation
const nodeX = node.position.x;
const nodeY = node.position.y;
-
- // Check for overlap
return !(testX + nodeWidth + padding < nodeX ||
testX > nodeX + nodeWidth + padding ||
testY + nodeHeight + padding < nodeY ||
@@ -383,7 +383,12 @@ const useFlowStore = create<FlowState>((set, get) => ({
}
// Helper to trace back the path of a trace by following edges upstream
- const duplicateTracePath = (traceId: string, forkAtNodeId: string): { newTraceId: string, newEdges: Edge[], firstNodeId: string } | null => {
+ const duplicateTracePath = (
+ traceId: string,
+ forkAtNodeId: string,
+ traceOwnerNodeId?: string,
+ pendingEdges: Edge[] = []
+ ): { newTraceId: string, newEdges: Edge[], firstNodeId: string } | null => {
// Trace back from forkAtNodeId to find the origin of this trace
// We follow incoming edges that match the trace pattern
@@ -393,10 +398,10 @@ const useFlowStore = create<FlowState>((set, get) => ({
// Trace backwards through incoming edges
while (true) {
- // Find incoming edge to current node that's part of this trace
+ // Find incoming edge to current node that carries THIS trace ID
const incomingEdge = edges.find(e =>
e.target === currentNodeId &&
- e.sourceHandle?.startsWith('trace-')
+ e.sourceHandle === `trace-${traceId}`
);
if (!incomingEdge) break; // Reached the start of the trace
@@ -413,14 +418,13 @@ const useFlowStore = create<FlowState>((set, get) => ({
const firstNode = nodes.find(n => n.id === firstNodeId);
if (!firstNode) return null;
- // Create a new trace ID for the duplicated path
- const timestamp = Date.now();
- const newTraceId = `fork-${firstNodeId}-${timestamp}`;
+ // Create a new trace ID for the duplicated path (guarantee uniqueness even within the same ms)
+ const uniq = `${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
+ const newTraceId = `fork-${firstNodeId}-${uniq}`;
const newTraceColor = getStableColor(newTraceId);
// Create new edges for the entire path
const newEdges: Edge[] = [];
- let evolvedTraceId = newTraceId;
// Track which input handles we're creating for new edges
const newInputHandles: Map<string, number> = new Map();
@@ -432,26 +436,30 @@ const useFlowStore = create<FlowState>((set, get) => ({
// Find the next available input handle for the target node
// Count existing edges to this node + any new edges we're creating
- const existingEdgesToTarget = edges.filter(e => e.target === toNodeId).length;
+ const existingEdgesToTarget =
+ edges.filter(e => e.target === toNodeId).length +
+ pendingEdges.filter(e => e.target === toNodeId).length;
const newEdgesToTarget = newInputHandles.get(toNodeId) || 0;
const nextInputIndex = existingEdgesToTarget + newEdgesToTarget;
newInputHandles.set(toNodeId, newEdgesToTarget + 1);
newEdges.push({
- id: `edge-fork-${timestamp}-${i}`,
+ id: `edge-fork-${uniq}-${i}`,
source: fromNodeId,
target: toNodeId,
- sourceHandle: `trace-${evolvedTraceId}`,
+ sourceHandle: `trace-${newTraceId}`,
targetHandle: `input-${nextInputIndex}`,
style: { stroke: newTraceColor, strokeWidth: 2 }
});
-
- // Evolve trace ID for next edge
- evolvedTraceId = `${evolvedTraceId}_${toNodeId}`;
}
// Find the messages up to the fork point
- const originalTrace = sourceNode.data.outgoingTraces?.find(t => t.id === traceId);
+ const traceOwnerNode = traceOwnerNodeId
+ ? nodes.find(n => n.id === traceOwnerNodeId)
+ : sourceNode;
+ if (!traceOwnerNode) return null;
+
+ const originalTrace = traceOwnerNode.data.outgoingTraces?.find(t => t.id === traceId);
const messagesUpToFork = originalTrace?.messages || [];
// Add the new trace as a forked trace on the first node
@@ -468,6 +476,161 @@ const useFlowStore = create<FlowState>((set, get) => ({
return { newTraceId, newEdges, firstNodeId };
};
+
+ // Helper to duplicate the downstream segment of a trace from a start node to an end node
+ const duplicateDownstreamSegment = (
+ originalTraceId: string,
+ startNodeId: string,
+ endNodeId: string,
+ newTraceId: string,
+ newTraceColor: string,
+ newTraceColors: string[]
+ ): Edge[] | null => {
+ const segmentEdges: Edge[] = [];
+ let currentNodeId = startNodeId;
+ const visitedEdgeIds = new Set<string>();
+
+ while (currentNodeId !== endNodeId) {
+ const nextEdge = edges.find(
+ (e) => e.source === currentNodeId && e.sourceHandle === `trace-${originalTraceId}`
+ );
+
+ if (!nextEdge || visitedEdgeIds.has(nextEdge.id)) {
+ return null;
+ }
+
+ segmentEdges.push(nextEdge);
+ visitedEdgeIds.add(nextEdge.id);
+ currentNodeId = nextEdge.target;
+ }
+
+ const newEdges: Edge[] = [];
+ const newInputCounts: Map<string, number> = new Map();
+ const segmentTimestamp = Date.now();
+
+ segmentEdges.forEach((edge, index) => {
+ const targetNodeId = edge.target;
+ const existingEdgesToTarget = edges.filter((e) => e.target === targetNodeId).length;
+ const additionalEdges = newInputCounts.get(targetNodeId) || 0;
+ const nextInputIndex = existingEdgesToTarget + additionalEdges;
+ newInputCounts.set(targetNodeId, additionalEdges + 1);
+
+ newEdges.push({
+ id: `edge-merged-seg-${segmentTimestamp}-${index}`,
+ source: edge.source,
+ target: edge.target,
+ sourceHandle: `trace-${newTraceId}`,
+ targetHandle: `input-${nextInputIndex}`,
+ type: 'merged',
+ style: { stroke: newTraceColor, strokeWidth: 2 },
+ data: { isMerged: true, colors: newTraceColors }
+ });
+ });
+
+ return newEdges;
+ };
+
+ // Helper to duplicate a merged trace by cloning its parent traces and creating a new merged branch
+ const duplicateMergedTraceBranch = (
+ mergedTrace: Trace,
+ forkAtNodeId: string
+ ): { newTraceId: string; newEdges: Edge[]; color: string } | null => {
+ const mergeNodeId = mergedTrace.sourceNodeId;
+ const mergeNode = nodes.find((n) => n.id === mergeNodeId);
+ if (!mergeNode) return null;
+
+ const mergedDef =
+ mergeNode.data.mergedTraces?.find((m: MergedTrace) => m.id === mergedTrace.id) || null;
+ const parentTraceIds = mergedTrace.sourceTraceIds || mergedDef?.sourceTraceIds || [];
+ if (parentTraceIds.length === 0) return null;
+
+ let accumulatedEdges: Edge[] = [];
+ const newParentTraceIds: string[] = [];
+ const parentOverrides: Trace[] = [];
+
+ for (const parentId of parentTraceIds) {
+ const originalParentTrace = mergeNode.data.traces?.find((t: Trace) => t.id === parentId);
+
+ if (originalParentTrace?.isMerged && originalParentTrace.sourceTraceIds?.length) {
+ const nestedDuplicate = duplicateMergedTraceBranch(originalParentTrace, mergeNodeId);
+ if (!nestedDuplicate) {
+ return null;
+ }
+ accumulatedEdges = accumulatedEdges.concat(nestedDuplicate.newEdges);
+ newParentTraceIds.push(nestedDuplicate.newTraceId);
+
+ parentOverrides.push({
+ ...originalParentTrace,
+ id: nestedDuplicate.newTraceId,
+ });
+
+ continue;
+ }
+
+ const duplicateResult = duplicateTracePath(parentId, mergeNodeId, mergeNodeId, accumulatedEdges);
+ if (!duplicateResult) {
+ return null;
+ }
+ accumulatedEdges = accumulatedEdges.concat(duplicateResult.newEdges);
+ newParentTraceIds.push(duplicateResult.newTraceId);
+
+ if (originalParentTrace) {
+ parentOverrides.push({
+ ...originalParentTrace,
+ id: duplicateResult.newTraceId,
+ });
+ }
+ }
+
+ const strategy = mergedDef?.strategy || 'trace_order';
+ const uniqMerged = `${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
+ const newMergedId = `merged-${mergeNodeId}-${uniqMerged}`;
+ const newColors =
+ parentOverrides.length > 0
+ ? parentOverrides.map((t) => t.color).filter((c): c is string => Boolean(c))
+ : mergedTrace.mergedColors ?? [];
+
+ const overrideTraces = parentOverrides.length > 0 ? parentOverrides : undefined;
+ const mergedMessages = get().computeMergedMessages(
+ mergeNodeId,
+ newParentTraceIds,
+ strategy,
+ overrideTraces
+ );
+
+ const newMergedDefinition: MergedTrace = {
+ id: newMergedId,
+ sourceNodeId: mergeNodeId,
+ sourceTraceIds: newParentTraceIds,
+ strategy,
+ colors: newColors.length ? newColors : [mergedTrace.color],
+ messages: mergedMessages,
+ };
+
+ const existingMerged = mergeNode.data.mergedTraces || [];
+ get().updateNodeData(mergeNodeId, {
+ mergedTraces: [...existingMerged, newMergedDefinition],
+ });
+
+ const newMergedColor = newColors[0] || mergedTrace.color || getStableColor(newMergedId);
+ const downstreamEdges = duplicateDownstreamSegment(
+ mergedTrace.id,
+ mergeNodeId,
+ forkAtNodeId,
+ newMergedId,
+ newMergedColor,
+ newColors.length ? newColors : [mergedTrace.color]
+ );
+ if (!downstreamEdges) return null;
+
+ accumulatedEdges = accumulatedEdges.concat(downstreamEdges);
+
+ return {
+ newTraceId: newMergedId,
+ newEdges: accumulatedEdges,
+ color: newMergedColor,
+ };
+ };
// Helper to create a simple forked trace (for new-trace handle or first connection)
const createSimpleForkTrace = () => {
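
A note on the fork helpers added in the hunk above: the duplicated path no longer uses an "evolving" trace ID that grows a suffix per node (trace-A_B_C); every edge of the copy reuses one freshly generated ID, and upstream edges are matched by the exact handle trace-${traceId}. A minimal sketch of the ID scheme, written as a hypothetical standalone helper rather than anything exported by the store:

// Illustrative only: timestamp plus a short random suffix, so two forks created
// within the same millisecond still receive distinct trace IDs.
const makeForkTraceId = (firstNodeId: string): string => {
  const uniq = `${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
  return `fork-${firstNodeId}-${uniq}`;
};

// Every duplicated edge then uses the same source handle, e.g.
// sourceHandle: `trace-${newTraceId}`, rather than an evolving `_nodeId` chain.
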
@@ -527,30 +690,40 @@ const useFlowStore = create<FlowState>((set, get) => ({
);
if (existingEdgeFromHandle && connection.sourceHandle?.startsWith('trace-')) {
- // This handle already has a connection - need to duplicate the entire upstream trace path
const originalTraceId = connection.sourceHandle.replace('trace-', '');
+ const traceMeta = sourceNode.data.outgoingTraces?.find((t: Trace) => t.id === originalTraceId);
+
+ if (traceMeta?.isMerged && traceMeta.sourceTraceIds && traceMeta.sourceTraceIds.length > 0) {
+ const mergedDuplicate = duplicateMergedTraceBranch(traceMeta, connection.source!);
+ if (mergedDuplicate) {
+ set({
+ edges: [
+ ...get().edges,
+ ...mergedDuplicate.newEdges,
+ {
+ id: `edge-${connection.source}-${connection.target}-${Date.now()}`,
+ source: connection.source!,
+ target: connection.target!,
+ sourceHandle: `trace-${mergedDuplicate.newTraceId}`,
+ targetHandle: connection.targetHandle,
+ type: 'merged',
+ style: { stroke: mergedDuplicate.color, strokeWidth: 2 },
+ data: { isMerged: true, colors: traceMeta.mergedColors || [] }
+ } as Edge
+ ],
+ });
+
+ setTimeout(() => get().propagateTraces(), 0);
+ return;
+ }
+ }
+
const duplicateResult = duplicateTracePath(originalTraceId, connection.source!);
if (duplicateResult) {
- // Add all the duplicated edges plus the new connection
- const { newTraceId, newEdges, firstNodeId } = duplicateResult;
+ const { newTraceId, newEdges } = duplicateResult;
const newTraceColor = getStableColor(newTraceId);
- // Calculate the evolved trace ID at the fork node
- // The trace evolves through each node: trace-A -> trace-A_B -> trace-A_B_C
- // We need to build the evolved ID based on the path
- let evolvedTraceId = newTraceId;
-
- // Find the path from first node to fork node by looking at the new edges
- for (const edge of newEdges) {
- if (edge.target === connection.source) {
- // This edge ends at our fork node, so the evolved trace ID is after this edge
- evolvedTraceId = `${evolvedTraceId}_${edge.target}`;
- break;
- }
- evolvedTraceId = `${evolvedTraceId}_${edge.target}`;
- }
-
set({
edges: [
...get().edges,
@@ -559,7 +732,7 @@ const useFlowStore = create<FlowState>((set, get) => ({
id: `edge-${connection.source}-${connection.target}-${Date.now()}`,
source: connection.source!,
target: connection.target!,
- sourceHandle: `trace-${evolvedTraceId}`,
+ sourceHandle: `trace-${newTraceId}`,
targetHandle: connection.targetHandle,
style: { stroke: newTraceColor, strokeWidth: 2 }
} as Edge
@@ -569,7 +742,6 @@ const useFlowStore = create<FlowState>((set, get) => ({
setTimeout(() => get().propagateTraces(), 0);
return;
} else {
- // Fallback to simple fork if path duplication fails
const newForkTrace = createSimpleForkTrace();
set({
@@ -1561,24 +1733,38 @@ const useFlowStore = create<FlowState>((set, get) => ({
const newHandleId = `trace-${matchedTrace.id}`;
// Check if this is a merged trace (need gradient)
- const isMergedTrace = matchedTrace.id.startsWith('merged-');
+ // Use the new properties on Trace object
+ const isMergedTrace = matchedTrace.isMerged || matchedTrace.id.startsWith('merged-');
+ const mergedColors = matchedTrace.mergedColors || [];
+
+ // If colors not on trace, try to find in parent node's mergedTraces (for originator)
+ let finalColors = mergedColors;
+ if (isMergedTrace && finalColors.length === 0) {
const parentNode = nodes.find(n => n.id === edge.source);
- const mergedTraceData = isMergedTrace
- ? parentNode?.data.mergedTraces?.find((m: MergedTrace) => m.id === matchedTrace.id)
- : null;
+ const mergedData = parentNode?.data.mergedTraces?.find((m: MergedTrace) => m.id === matchedTrace.id);
+ if (mergedData) finalColors = mergedData.colors;
+ }
// Create gradient for merged traces
let gradient: string | undefined;
- if (mergedTraceData && mergedTraceData.colors.length > 0) {
- const colors = mergedTraceData.colors;
- const gradientStops = colors.map((color: string, idx: number) =>
- `${color} ${(idx / colors.length) * 100}%, ${color} ${((idx + 1) / colors.length) * 100}%`
+ if (finalColors.length > 0) {
+ const gradientStops = finalColors.map((color: string, idx: number) =>
+ `${color} ${(idx / finalColors.length) * 100}%, ${color} ${((idx + 1) / finalColors.length) * 100}%`
).join(', ');
gradient = `linear-gradient(90deg, ${gradientStops})`;
}
// Check if we need to update
- if (currentEdge.sourceHandle !== newHandleId || currentEdge.style?.stroke !== matchedTrace.color) {
+ // Update if handle changed OR color changed OR merged status/colors changed
+ const currentIsMerged = currentEdge.data?.isMerged;
+ const currentColors = currentEdge.data?.colors;
+ const colorsChanged = JSON.stringify(currentColors) !== JSON.stringify(finalColors);
+
+ if (currentEdge.sourceHandle !== newHandleId ||
+ currentEdge.style?.stroke !== matchedTrace.color ||
+ currentIsMerged !== isMergedTrace ||
+ colorsChanged) {
+
updatedEdges[edgeIndex] = {
...currentEdge,
sourceHandle: newHandleId,
@@ -1588,7 +1774,7 @@ const useFlowStore = create<FlowState>((set, get) => ({
...currentEdge.data,
gradient,
isMerged: isMergedTrace,
- colors: mergedTraceData?.colors || []
+ colors: finalColors
}
};
edgesChanged = true;
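
For illustration, the hard-stop gradient assembled from finalColors above behaves as in this standalone sketch (the helper name is hypothetical; the store builds the string inline):

// Each color occupies an equal, hard-edged slice of the gradient.
const buildMergedGradient = (colors: string[]): string | undefined => {
  if (colors.length === 0) return undefined;
  const stops = colors
    .map((color, idx) =>
      `${color} ${(idx / colors.length) * 100}%, ${color} ${((idx + 1) / colors.length) * 100}%`)
    .join(', ');
  return `linear-gradient(90deg, ${stops})`;
};

// buildMergedGradient(['#e06666', '#6fa8dc'])
//   -> 'linear-gradient(90deg, #e06666 0%, #e06666 50%, #6fa8dc 50%, #6fa8dc 100%)'
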
@@ -1749,7 +1935,10 @@ const useFlowStore = create<FlowState>((set, get) => ({
id: merged.id,
sourceNodeId: node.id,
color: updatedColors[0] || getStableColor(merged.id),
- messages: mergedMessages
+ messages: mergedMessages,
+ isMerged: true,
+ mergedColors: updatedColors,
+ sourceTraceIds: merged.sourceTraceIds
};
myOutgoingTraces.push(mergedOutgoing);
@@ -1775,19 +1964,22 @@ const useFlowStore = create<FlowState>((set, get) => ({
});
// Bulk Update Store
+ const uniqTraces = (list: Trace[]) => Array.from(new Map(list.map(t => [t.id, t])).values());
+ const uniqMerged = (list: MergedTrace[]) => Array.from(new Map(list.map(m => [m.id, m])).values());
+
set(state => ({
edges: updatedEdges,
nodes: state.nodes.map(n => {
- const traces = nodeIncomingTraces.get(n.id) || [];
- const outTraces = nodeOutgoingTraces.get(n.id) || [];
+ const traces = uniqTraces(nodeIncomingTraces.get(n.id) || []);
+ const outTraces = uniqTraces(nodeOutgoingTraces.get(n.id) || []);
const mergedToDelete = nodeMergedTracesToDelete.get(n.id) || [];
const updatedMerged = nodeUpdatedMergedTraces.get(n.id);
const cleanedForks = nodeForkedTracesToClean.get(n.id);
// Filter out disconnected merged traces and update messages for remaining ones
- let filteredMergedTraces = (n.data.mergedTraces || []).filter(
+ let filteredMergedTraces = uniqMerged((n.data.mergedTraces || []).filter(
(m: MergedTrace) => !mergedToDelete.includes(m.id)
- );
+ ));
// Apply updated messages and colors to merged traces
if (updatedMerged && updatedMerged.size > 0) {