"""Pydantic schemas for node-based LLM chat execution.

Defines the wire/domain models used by the backend: chat messages and
conversation contexts, per-node LLM configuration, and the request/response
payloads for running a single node.
"""

from pydantic import BaseModel, Field
from typing import List, Optional, Dict, Any, Union
from enum import Enum
import time


class Role(str, Enum):
    """Chat participant role (OpenAI-style message roles)."""
    USER = "user"
    ASSISTANT = "assistant"
    SYSTEM = "system"


class Message(BaseModel):
    """A single chat message plus provenance metadata."""
    id: str = Field(..., description="Unique ID for the message")
    role: Role
    content: str
    # Creation time in seconds since the epoch; default_factory evaluates
    # time.time() per instance rather than once at class definition.
    timestamp: float = Field(default_factory=time.time)
    # Metadata to track where this message came from
    source_node_id: Optional[str] = None
    model_used: Optional[str] = None


class Context(BaseModel):
    """An ordered conversation history (list of messages)."""
    # default_factory=list is the documented Pydantic idiom for mutable
    # defaults; a bare `= []` is a shared-mutable-default trap in plain
    # Python and is flagged by linters, even though Pydantic copies it.
    messages: List[Message] = Field(default_factory=list)


class ModelProvider(str, Enum):
    """Supported LLM backends."""
    OPENAI = "openai"
    GOOGLE = "google"


class LLMConfig(BaseModel):
    """Per-call LLM settings for a node."""
    provider: ModelProvider
    model_name: str
    temperature: float = 0.7
    max_tokens: int = 1000
    system_prompt: Optional[str] = None
    api_key: Optional[str] = None  # Optional override, usually from env


class MergeStrategy(str, Enum):
    """How multiple incoming contexts are combined before a run."""
    RAW = "raw"
    SMART = "smart"


class NodeRunRequest(BaseModel):
    """Request payload to execute one node against its incoming contexts."""
    node_id: str
    # Contexts flowing in from upstream nodes; may be empty for root nodes.
    incoming_contexts: List[Context] = Field(default_factory=list)
    user_prompt: str
    config: LLMConfig
    merge_strategy: MergeStrategy = MergeStrategy.SMART


class NodeRunResponse(BaseModel):
    """Result of running one node."""
    node_id: str
    # Full conversation after this run (input context + new exchange).
    output_context: Context
    response_content: str
    # Provider-specific raw payload and token-usage stats, when available.
    raw_response: Optional[Dict[str, Any]] = None
    usage: Optional[Dict[str, Any]] = None