diff options
| -rw-r--r-- | backend/app/services/llm.py | 2 | ||||
| -rw-r--r-- | frontend/src/components/Sidebar.tsx | 6 |
2 files changed, 7 insertions, 1 deletion
diff --git a/backend/app/services/llm.py b/backend/app/services/llm.py index 6f79d35..660a69d 100644 --- a/backend/app/services/llm.py +++ b/backend/app/services/llm.py @@ -169,7 +169,9 @@ async def stream_openai( if c_type == 'output_text': text_val = getattr(c, 'text', None) if text_val: + print(f"[responses debug] YIELDING text: {text_val[:50]}...") yield text_val + print(f"[responses debug] YIELDED successfully") found_content = True if not found_content: diff --git a/frontend/src/components/Sidebar.tsx b/frontend/src/components/Sidebar.tsx index e517693..700574a 100644 --- a/frontend/src/components/Sidebar.tsx +++ b/frontend/src/components/Sidebar.tsx @@ -325,8 +325,12 @@ const Sidebar: React.FC<SidebarProps> = ({ isOpen, onToggle, onInteract }) => { while (true) { const { value, done } = await reader.read(); - if (done) break; + if (done) { + console.log('[stream] done, fullResponse length:', fullResponse.length); + break; + } const chunk = decoder.decode(value); + console.log('[stream] received chunk:', chunk.substring(0, 50)); fullResponse += chunk; // Only update stream buffer, the display logic will check streamingNodeId setStreamBuffer(prev => prev + chunk); |
