import React, { useState, useEffect } from 'react'; import useFlowStore from '../store/flowStore'; import type { NodeData } from '../store/flowStore'; import ReactMarkdown from 'react-markdown'; import { Play, Settings, Info, Save, ChevronLeft, ChevronRight, Maximize2, Edit3, X, Check, FileText } from 'lucide-react'; interface SidebarProps { isOpen: boolean; onToggle: () => void; } const Sidebar: React.FC = ({ isOpen, onToggle }) => { const { nodes, selectedNodeId, updateNodeData, getActiveContext } = useFlowStore(); const [activeTab, setActiveTab] = useState<'interact' | 'settings' | 'debug'>('interact'); const [streamBuffer, setStreamBuffer] = useState(''); // Response Modal & Edit states const [isModalOpen, setIsModalOpen] = useState(false); const [isEditing, setIsEditing] = useState(false); const [editedResponse, setEditedResponse] = useState(''); // Summary states const [showSummaryModal, setShowSummaryModal] = useState(false); const [summaryModel, setSummaryModel] = useState('gpt-5-nano'); const [isSummarizing, setIsSummarizing] = useState(false); const selectedNode = nodes.find((n) => n.id === selectedNodeId); // Reset stream buffer and modal states when node changes useEffect(() => { setStreamBuffer(''); setIsModalOpen(false); setIsEditing(false); }, [selectedNodeId]); // Sync editedResponse when entering edit mode useEffect(() => { if (isEditing && selectedNode) { setEditedResponse(selectedNode.data.response || ''); } }, [isEditing, selectedNode?.data.response]); if (!isOpen) { return (
{/* NOTE(review): the JSX element tags in this region appear to have been
    stripped by an extraction/paste step — only text and expression residue
    remains. Recover the original markup from version control before editing. */}
{selectedNode && (
{selectedNode.data.label}
)}
); } if (!selectedNode) { return (
{/* NOTE(review): empty state shown when no node is selected — markup residue. */}
Details

Select a node to edit

); } const handleRun = async () => { if (!selectedNode) return; updateNodeData(selectedNode.id, { status: 'loading', response: '' }); setStreamBuffer(''); // Use getActiveContext which respects the user's selected traces const context = getActiveContext(selectedNode.id); try { const response = await fetch('http://localhost:8000/api/run_node_stream', { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ node_id: selectedNode.id, incoming_contexts: [{ messages: context }], // Simple list wrap for now user_prompt: selectedNode.data.userPrompt, merge_strategy: selectedNode.data.mergeStrategy || 'smart', config: { provider: selectedNode.data.model.includes('gpt') || selectedNode.data.model === 'o3' ? 'openai' : 'google', model_name: selectedNode.data.model, temperature: selectedNode.data.temperature, system_prompt: selectedNode.data.systemPrompt, api_key: selectedNode.data.apiKey, enable_google_search: selectedNode.data.enableGoogleSearch !== false, // Default true reasoning_effort: selectedNode.data.reasoningEffort || 'medium', // For reasoning models } }) }); if (!response.body) return; const reader = response.body.getReader(); const decoder = new TextDecoder(); let fullResponse = ''; while (true) { const { value, done } = await reader.read(); if (done) break; const chunk = decoder.decode(value); fullResponse += chunk; setStreamBuffer(prev => prev + chunk); // We update the store less frequently or at the end to avoid too many re-renders // But for "live" feel we might want to update local state `streamBuffer` and sync to store at end } // Update final state // Append the new interaction to the node's output messages const newUserMsg = { id: `msg_${Date.now()}_u`, role: 'user', content: selectedNode.data.userPrompt }; const newAssistantMsg = { id: `msg_${Date.now()}_a`, role: 'assistant', content: fullResponse }; updateNodeData(selectedNode.id, { status: 'success', response: fullResponse, messages: [...context, newUserMsg, 
newAssistantMsg] as any }); // Auto-generate title using gpt-5-nano (async, non-blocking) // Always regenerate title after each query generateTitle(selectedNode.id, selectedNode.data.userPrompt, fullResponse); } catch (error) { console.error(error); updateNodeData(selectedNode.id, { status: 'error' }); } }; const handleChange = (field: keyof NodeData, value: any) => { updateNodeData(selectedNode.id, { [field]: value }); }; const handleSaveEdit = () => { if (!selectedNode) return; updateNodeData(selectedNode.id, { response: editedResponse }); setIsEditing(false); }; const handleCancelEdit = () => { setIsEditing(false); setEditedResponse(selectedNode?.data.response || ''); }; // Summarize response const handleSummarize = async () => { if (!selectedNode?.data.response) return; setIsSummarizing(true); setShowSummaryModal(false); try { const res = await fetch('http://localhost:8000/api/summarize', { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ content: selectedNode.data.response, model: summaryModel }) }); if (res.ok) { const data = await res.json(); if (data.summary) { // Replace response with summary updateNodeData(selectedNode.id, { response: data.summary }); } } } catch (error) { console.error('Summarization failed:', error); } finally { setIsSummarizing(false); } }; // Auto-generate title using gpt-5-nano const generateTitle = async (nodeId: string, userPrompt: string, response: string) => { try { const res = await fetch('http://localhost:8000/api/generate_title', { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ user_prompt: userPrompt, response }) }); if (res.ok) { const data = await res.json(); if (data.title) { updateNodeData(nodeId, { label: data.title }); } } } catch (error) { console.error('Failed to generate title:', error); // Silently fail - keep the original title } }; return (
{/* NOTE(review): the JSX element tags in this whole return expression appear
    to have been stripped by an extraction/paste step (e.g. the editable-label
    input below shows only its onChange tail and className) and the markup is
    truncated mid-tree. Recover the original JSX from version control. */}
{/* Header */}
handleChange('label', e.target.value)} className="font-bold text-lg bg-transparent border-none focus:ring-0 focus:outline-none w-full" />
{selectedNode.data.status}
ID: {selectedNode.id}
{/* Tabs */}
{/* Content */}
{activeTab === 'interact' && (
{/* Trace Selector */} {selectedNode.data.traces && selectedNode.data.traces.length > 0 && (
{/* NOTE(review): in the trace onClick below, `current` is computed but unused
    since selection became single-select (`next = [trace.id]`) — dead code to
    clean up once the markup is restored. */}
{selectedNode.data.traces.map((trace) => { const isActive = selectedNode.data.activeTraceIds?.includes(trace.id); return (
{ const current = selectedNode.data.activeTraceIds || []; const next = [trace.id]; // Single select mode handleChange('activeTraceIds', next); }} >
#{trace.id.slice(-4)}
From Node: {trace.sourceNodeId}
{trace.messages.length} msgs
); })}
)}