summary refs log tree commit diff
path: root/backend/app/main.py
diff options
context:
space:
mode:
Diffstat (limited to 'backend/app/main.py')
-rw-r--r-- backend/app/main.py | 60
1 file changed, 44 insertions(+), 16 deletions(-)
diff --git a/backend/app/main.py b/backend/app/main.py
index c254652..b0d6138 100644
--- a/backend/app/main.py
+++ b/backend/app/main.py
@@ -1,4 +1,5 @@
import asyncio
+import logging
import tempfile
import time
from fastapi import FastAPI, HTTPException, Depends
@@ -21,6 +22,20 @@ from sqlalchemy.orm import Session
load_dotenv()
+# --------------- Logging Setup ---------------
+_LOG_DIR = os.path.join(os.path.abspath(os.getenv("DATA_ROOT", os.path.join(os.getcwd(), "data"))), "logs")
+os.makedirs(_LOG_DIR, exist_ok=True)
+
+logging.basicConfig(
+ level=logging.DEBUG,
+ format="%(asctime)s [%(name)s] %(levelname)s %(message)s",
+ handlers=[
+ logging.StreamHandler(),
+ logging.FileHandler(os.path.join(_LOG_DIR, "contextflow.log"), encoding="utf-8"),
+ ],
+)
+logger = logging.getLogger("contextflow")
+
app = FastAPI(title="ContextFlow Backend")
# Include authentication router
@@ -53,9 +68,9 @@ async def startup_event():
)
db.add(test_user)
db.commit()
- print("[startup] Created default test user (test/114514)")
+ logger.info("Created default test user (test/114514)")
else:
- print("[startup] Test user already exists")
+ logger.info("Test user already exists")
finally:
db.close()
@@ -77,11 +92,19 @@ def get_user_api_key(user: User | None, provider: str) -> str | None:
return user.openai_api_key
if provider in ("google", "gemini") and user.gemini_api_key:
return user.gemini_api_key
+ if provider == "claude" and user.claude_api_key:
+ return user.claude_api_key
+ if provider == "openrouter" and user.openrouter_api_key:
+ return user.openrouter_api_key
# Fallback to environment variables
if provider == "openai":
return os.getenv("OPENAI_API_KEY")
if provider in ("google", "gemini"):
return os.getenv("GOOGLE_API_KEY")
+ if provider == "claude":
+ return os.getenv("ANTHROPIC_API_KEY")
+ if provider == "openrouter":
+ return os.getenv("OPENROUTER_API_KEY")
return None
def ensure_user_root(user: str) -> str:
@@ -317,14 +340,14 @@ async def run_node_stream(
if vs_id:
vs_ids = [vs_id]
except Exception as e:
- print(f"[warn] Could not get vector store: {e}")
+ logger.warning("Could not get vector store: %s", e)
if vs_ids:
tool_def = {"type": "file_search", "vector_store_ids": vs_ids}
if filters:
tool_def["filters"] = filters
tools.append(tool_def)
- print(f"[openai file_search] vs_ids={vs_ids} refs={debug_refs} filters={filters}")
+ logger.debug("openai file_search: vs_ids=%s refs=%s filters=%s", vs_ids, debug_refs, filters)
elif request.config.provider == ModelProvider.GOOGLE:
attachments = await prepare_attachments(
user=username,
@@ -333,8 +356,10 @@ async def run_node_stream(
llm_config=request.config,
)
+ openrouter_key = get_user_api_key(current_user, "openrouter")
+
return StreamingResponse(
- llm_streamer(execution_context, request.user_prompt, request.config, attachments, tools),
+ llm_streamer(execution_context, request.user_prompt, request.config, attachments, tools, openrouter_api_key=openrouter_key),
media_type="text/event-stream"
)
@@ -413,12 +438,15 @@ def save_blueprint(req: SaveBlueprintRequest):
try:
full_path = safe_path(req.user, req.path)
os.makedirs(os.path.dirname(full_path), exist_ok=True)
+ raw = json.dumps(req.content, ensure_ascii=False, indent=2)
+ logger.info("save_blueprint: user=%s path=%s size=%d bytes", req.user, req.path, len(raw))
with open(full_path, "w", encoding="utf-8") as f:
- json.dump(req.content, f, ensure_ascii=False, indent=2)
+ f.write(raw)
return {"ok": True}
except HTTPException:
raise
except Exception as e:
+ logger.error("save_blueprint failed: user=%s path=%s error=%s", req.user, req.path, e)
raise HTTPException(status_code=500, detail=str(e))
@@ -582,7 +610,7 @@ async def remove_file_from_vector_store(vs_id: str, file_id: str, client=None):
try:
await client.vector_stores.files.delete(vector_store_id=vs_id, file_id=file_id)
except Exception as e:
- print(f"[warn] remove_file_from_vector_store failed: {e}")
+ logger.warning("remove_file_from_vector_store failed: %s", e)
async def add_file_to_vector_store(vs_id: str, file_id: str, client=None):
"""
@@ -658,7 +686,7 @@ async def prepare_attachments(
for fid in attached_ids:
meta = items_map.get(fid)
if not meta:
- print(f"[warn] Attached file id not found, skipping: {fid}")
+ logger.warning("Attached file id not found, skipping: %s", fid)
continue
path = os.path.join(files_root(user), fid)
@@ -747,7 +775,7 @@ async def prepare_attachments(
await asyncio.sleep(1)
except Exception:
await asyncio.sleep(1)
- print(f"[google upload] name={google_name} uri={google_uri}")
+ logger.debug("google upload: name=%s uri=%s", google_name, google_uri)
uri = google_uri or google_name
if not uri:
@@ -770,7 +798,7 @@ async def prepare_attachments(
raise HTTPException(status_code=400, detail=f"Unsupported provider for attachments: {target_provider}")
# Debug log
- print(f"[attachments] provider={provider_norm} count={len(attachments)} detail={[{'name': a.get('name'), 'id': a.get('file_id', a.get('uri'))} for a in attachments]}")
+ logger.debug("attachments: provider=%s count=%d detail=%s", provider_norm, len(attachments), [{'name': a.get('name'), 'id': a.get('file_id', a.get('uri'))} for a in attachments])
return attachments
@@ -801,7 +829,7 @@ async def prepare_openai_vector_search(
for item in items:
if item.scopes and any(s in scopes for s in item.scopes):
relevant_files_map[item.id] = item
- print(f"[file_search] scopes={scopes} matched_files={[f.name for f in relevant_files_map.values()]}")
+ logger.debug("file_search: scopes=%s matched_files=%s", scopes, [f.name for f in relevant_files_map.values()])
# Second: also add explicitly attached files (they should always be searchable)
if attached_ids:
@@ -809,7 +837,7 @@ async def prepare_openai_vector_search(
meta = items_map.get(fid)
if meta and fid not in relevant_files_map:
relevant_files_map[fid] = meta
- print(f"[file_search] adding explicitly attached file: {meta.name}")
+ logger.debug("file_search: adding explicitly attached file: %s", meta.name)
relevant_files = list(relevant_files_map.values())
@@ -824,12 +852,12 @@ async def prepare_openai_vector_search(
for meta in relevant_files:
path = os.path.join(files_root(user), meta.id)
if not os.path.exists(path):
- print(f"[warn] Attached file missing on disk, skipping: {meta.id}")
+ logger.warning("Attached file missing on disk, skipping: %s", meta.id)
continue
# Enforce 50MB OpenAI limit
file_size = os.path.getsize(path)
if file_size > OPENAI_MAX_FILE_SIZE:
- print(f"[warn] File {meta.name} exceeds OpenAI 50MB limit, skipping")
+ logger.warning("File %s exceeds OpenAI 50MB limit, skipping", meta.name)
continue
openai_file_id, vs_id = await ensure_openai_file_and_index(user, meta, path, llm_config)
@@ -1006,9 +1034,9 @@ async def upload_file(
if provider_normalized == "openai" and not meta.provider_file_id:
meta.provider_file_id = openai_file_id
except Exception as e:
- print(f"[warn] OpenAI indexing failed for {file_name}: {e}")
+ logger.warning("OpenAI indexing failed for %s: %s", file_name, e)
else:
- print(f"[warn] Skipping OpenAI indexing for {file_name}: exceeds 50MB")
+ logger.warning("Skipping OpenAI indexing for %s: exceeds 50MB", file_name)
items.append(meta)
save_files_index(user, items)