from pydantic import BaseModel, Field
from typing import List, Optional, Dict, Any
from enum import Enum
import time


class Role(str, Enum):
    """Who authored a message in the conversation."""
    USER = "user"
    ASSISTANT = "assistant"
    SYSTEM = "system"


class Message(BaseModel):
    """A single chat message, plus metadata about where it came from."""
    id: str = Field(..., description="Unique ID for the message")
    role: Role
    content: str
    timestamp: float = Field(default_factory=time.time)
    # Metadata to track where this message came from
    source_node_id: Optional[str] = None
    model_used: Optional[str] = None


class Context(BaseModel):
    """An ordered list of messages forming a conversation context."""
    messages: List[Message] = Field(default_factory=list)


class ModelProvider(str, Enum):
    OPENAI = "openai"
    GOOGLE = "google"


class LLMConfig(BaseModel):
    """Configuration for a single LLM call."""
    provider: ModelProvider
    model_name: str
    temperature: float = 0.7
    max_tokens: int = 1000
    system_prompt: Optional[str] = None
    api_key: Optional[str] = None  # Optional override, usually from env


class MergeStrategy(str, Enum):
    """How incoming contexts are combined before a node runs."""
    RAW = "raw"
    SMART = "smart"


class NodeRunRequest(BaseModel):
    """Request payload for executing a single node."""
    node_id: str
    incoming_contexts: List[Context] = Field(default_factory=list)
    user_prompt: str
    config: LLMConfig
    merge_strategy: MergeStrategy = MergeStrategy.SMART


class NodeRunResponse(BaseModel):
    """Result of a node run: the output context plus the model's reply."""
    node_id: str
    output_context: Context
    response_content: str
    raw_response: Optional[Dict[str, Any]] = None
    usage: Optional[Dict[str, Any]] = None
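

# --- Illustrative usage sketch (not part of the schema definitions above) ---
# The snippet below shows how these models might be instantiated and serialized
# for a single node run. The IDs, model name, and prompt text are placeholder
# values chosen for the example, not values mandated by this module.
if __name__ == "__main__":
    context = Context(
        messages=[
            Message(
                id="msg-1",  # placeholder message ID
                role=Role.USER,
                content="Summarize the previous discussion.",
            )
        ]
    )
    request = NodeRunRequest(
        node_id="node-1",  # placeholder node ID
        incoming_contexts=[context],
        user_prompt="Summarize the previous discussion.",
        config=LLMConfig(
            provider=ModelProvider.OPENAI,
            model_name="gpt-4o-mini",  # assumed model name; adjust as needed
        ),
        merge_strategy=MergeStrategy.SMART,
    )
    # Serialize to a JSON-ready dict for transport.
    # model_dump() assumes Pydantic v2; on Pydantic v1 use request.dict() instead.
    print(request.model_dump())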