diff options
| author | YurenHao0426 <blackhao0426@gmail.com> | 2026-02-13 21:43:34 +0000 |
|---|---|---|
| committer | YurenHao0426 <blackhao0426@gmail.com> | 2026-02-13 21:43:34 +0000 |
| commit | 77be59bc0a6353e98846b9c9bfa2d566efea8b1f (patch) | |
| tree | c0cc008b4705eb50616e6656f8fbc0e5b3475307 /backend/app/schemas.py | |
| parent | 30921396cb53f61eca90c85d692e0fc06d0f5ff4 (diff) | |
Add LLM Council mode for multi-model consensus
3-stage council orchestration: parallel model queries (Stage 1),
anonymous peer ranking (Stage 2), and streamed chairman synthesis
(Stage 3). Includes scope-aware file resolution for Google/Claude
providers so upstream file attachments are visible to all providers.
- Backend: council.py orchestrator, /api/run_council_stream endpoint,
query_model_full() non-streaming wrapper, resolve_provider() helper,
resolve_scoped_file_ids() for Google/Claude scope parity with OpenAI
- Frontend: council toggle UI, model checkbox selector, chairman picker,
SSE event parsing, tabbed Stage 1/2/3 response display
- Canvas: amber council node indicator with Users icon
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
Diffstat (limited to 'backend/app/schemas.py')
| -rw-r--r-- | backend/app/schemas.py | 18 |
1 file changed, 17 insertions(+), 1 deletion(-)
diff --git a/backend/app/schemas.py b/backend/app/schemas.py
index 8e5f12c..a527004 100644
--- a/backend/app/schemas.py
+++ b/backend/app/schemas.py
@@ -34,7 +34,7 @@ class LLMConfig(BaseModel):
     provider: ModelProvider
     model_name: str
     temperature: float = 0.7
-    max_tokens: int = 1000
+    max_tokens: Optional[int] = None
     system_prompt: Optional[str] = None
     api_key: Optional[str] = None  # Optional override, usually from env
     enable_google_search: bool = False
@@ -55,6 +55,22 @@ class NodeRunRequest(BaseModel):
     # Contains all project/node combinations in the current trace
     scopes: List[str] = Field(default_factory=list)
 
+class CouncilMemberConfig(BaseModel):
+    model_name: str  # e.g. "gpt-5", "claude-opus-4-6", "gemini-3-pro-preview"
+
+class CouncilRunRequest(BaseModel):
+    node_id: str
+    incoming_contexts: List[Context] = []
+    user_prompt: str
+    council_models: List[CouncilMemberConfig]  # 2-6 models
+    chairman_model: str  # Model name for synthesis
+    system_prompt: Optional[str] = None
+    temperature: float = 0.7
+    reasoning_effort: ReasoningEffort = ReasoningEffort.MEDIUM
+    merge_strategy: MergeStrategy = MergeStrategy.SMART
+    attached_file_ids: List[str] = Field(default_factory=list)
+    scopes: List[str] = Field(default_factory=list)
+
 class NodeRunResponse(BaseModel):
     node_id: str
     output_context: Context
