summaryrefslogtreecommitdiff
path: root/backend/app/schemas.py
blob: 7a657a3206e107b01fc20fbb76b9140e771e9ca4 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
from pydantic import BaseModel, Field
from typing import List, Optional, Dict, Any, Union
from enum import Enum
import time

class Role(str, Enum):
    """Author of a chat message (mirrors the standard chat-completaccording roles)."""

    USER = "user"
    ASSISTANT = "assistant"
    SYSTEM = "system"

class Message(BaseModel):
    """A single chat message, including provenance metadata."""

    id: str = Field(..., description="Unique ID for the message")
    role: Role
    content: str
    # Unix timestamp (seconds); captured at object-creation time via default_factory.
    timestamp: float = Field(default_factory=time.time)
    # Metadata to track where this message came from:
    # the graph node that produced it and the model that generated it.
    source_node_id: Optional[str] = None
    model_used: Optional[str] = None

class Context(BaseModel):
    """An ordered list of messages passed into / out of a node run."""

    # Use Field(default_factory=list) rather than `= []` for consistency with
    # the other list-valued fields in this module (attached_file_ids, scopes)
    # and to avoid the shared-mutable-default pattern.
    messages: List[Message] = Field(default_factory=list)

class ModelProvider(str, Enum):
    """Supported LLM vendors."""

    OPENAI = "openai"
    GOOGLE = "google"
    CLAUDE = "claude"

class ReasoningEffort(str, Enum):
    """Reasoning-effort level passed to models that support it (see LLMConfig)."""

    LOW = "low"
    MEDIUM = "medium"
    HIGH = "high"

class LLMConfig(BaseModel):
    """Configuration for a single LLM call."""

    provider: ModelProvider
    model_name: str
    temperature: float = 0.7
    # None presumably means "use the provider's default limit" — verify in caller.
    max_tokens: Optional[int] = None
    system_prompt: Optional[str] = None
    api_key: Optional[str] = None  # Optional override, usually from env
    enable_google_search: bool = False
    reasoning_effort: ReasoningEffort = ReasoningEffort.MEDIUM  # For OpenAI reasoning models

class MergeStrategy(str, Enum):
    """How multiple incoming contexts are combined before a run."""

    RAW = "raw"
    SMART = "smart"

class NodeRunRequest(BaseModel):
    """Request payload to execute a single LLM node."""

    node_id: str
    # Contexts arriving from upstream nodes; combined per merge_strategy.
    # default_factory used for consistency with the other list fields below.
    incoming_contexts: List[Context] = Field(default_factory=list)
    user_prompt: str
    config: LLMConfig
    merge_strategy: MergeStrategy = MergeStrategy.SMART
    attached_file_ids: List[str] = Field(default_factory=list)
    # Scopes for file_search filtering: ["project_path/node_id", ...]
    # Contains all project/node combinations in the current trace
    scopes: List[str] = Field(default_factory=list)
    
class CouncilMemberConfig(BaseModel):
    """Per-model settings for one council/debate participant.

    Optional fields left as None fall back to the request-level defaults.
    """

    model_name: str  # e.g. "gpt-5", "claude-opus-4-6", "gemini-3-pro-preview"
    temperature: Optional[float] = None           # None = use request default
    reasoning_effort: Optional[ReasoningEffort] = None
    enable_google_search: Optional[bool] = None
    incoming_contexts: Optional[List[Context]] = None  # Per-member context override

class CouncilRunRequest(BaseModel):
    """Request payload for a council run: several models answer, a chairman synthesizes."""

    node_id: str
    # Default context for all members; individual members may override via
    # CouncilMemberConfig.incoming_contexts. default_factory used for
    # consistency with the other list fields below.
    incoming_contexts: List[Context] = Field(default_factory=list)
    user_prompt: str
    council_models: List[CouncilMemberConfig]  # 2-6 models
    chairman_model: CouncilMemberConfig  # Model config for synthesis
    system_prompt: Optional[str] = None
    # Request-level defaults, used when a member config leaves them as None.
    temperature: float = 0.7
    reasoning_effort: ReasoningEffort = ReasoningEffort.MEDIUM
    enable_google_search: bool = False
    merge_strategy: MergeStrategy = MergeStrategy.SMART
    attached_file_ids: List[str] = Field(default_factory=list)
    scopes: List[str] = Field(default_factory=list)

class DebateJudgeMode(str, Enum):
    """How a debate is concluded (external judge, self-convergence, or no verdict)."""

    EXTERNAL_JUDGE = "external_judge"
    SELF_CONVERGENCE = "self_convergence"
    DISPLAY_ONLY = "display_only"

class DebateFormat(str, Enum):
    """Debate interaction pattern; CUSTOM uses DebateRunRequest.custom_format_prompt."""

    FREE_DISCUSSION = "free_discussion"
    STRUCTURED_OPPOSITION = "structured_opposition"
    ITERATIVE_IMPROVEMENT = "iterative_improvement"
    CUSTOM = "custom"

class DebateRunRequest(BaseModel):
    """Request payload for a multi-model debate run."""

    node_id: str
    # default_factory used for consistency with the other list fields below.
    incoming_contexts: List[Context] = Field(default_factory=list)
    user_prompt: str
    debate_models: List[CouncilMemberConfig]  # 2-6 models
    # Judge is only meaningful for EXTERNAL_JUDGE mode; may be None otherwise.
    judge_model: Optional[CouncilMemberConfig] = None
    judge_mode: DebateJudgeMode = DebateJudgeMode.EXTERNAL_JUDGE
    debate_format: DebateFormat = DebateFormat.FREE_DISCUSSION
    # Free-form format instructions; presumably read when debate_format is
    # CUSTOM — verify against the runner.
    custom_format_prompt: Optional[str] = None
    max_rounds: int = 5
    system_prompt: Optional[str] = None
    # Request-level defaults, used when a member config leaves them as None.
    temperature: float = 0.7
    reasoning_effort: ReasoningEffort = ReasoningEffort.MEDIUM
    enable_google_search: bool = False
    merge_strategy: MergeStrategy = MergeStrategy.SMART
    attached_file_ids: List[str] = Field(default_factory=list)
    scopes: List[str] = Field(default_factory=list)

class NodeRunResponse(BaseModel):
    """Result of executing a node: the new context plus raw provider output."""

    node_id: str
    output_context: Context
    response_content: str
    # Raw provider payload and token-usage stats; shapes are provider-specific.
    raw_response: Optional[Dict[str, Any]] = None
    usage: Optional[Dict[str, Any]] = None