summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorYurenHao0426 <blackhao0426@gmail.com>2026-02-13 06:02:20 +0000
committerYurenHao0426 <blackhao0426@gmail.com>2026-02-13 06:02:20 +0000
commitde9e6207c84ceb55ef4f377d68f93a117f1f1cc2 (patch)
tree50bf101b9542dcd7840f7b66aee774715d188349
parent61293147c1d6f1cdde689c36faad923b600a4f6e (diff)
Auto re-upload expired Google files on node run
Add _check_google_file_active() to verify cached Google file URIs before reuse. If a file has expired (48h limit), the cache entry is cleared and the file is transparently re-uploaded via ensure_google_file_upload(). Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
-rw-r--r--backend/app/main.py47
1 file changed, 38 insertions, 9 deletions
diff --git a/backend/app/main.py b/backend/app/main.py
index 75491bb..a6f4f10 100644
--- a/backend/app/main.py
+++ b/backend/app/main.py
@@ -808,6 +808,29 @@ def save_files_index(user: str, items: List[FileMeta]):
json.dump([item.model_dump() for item in items], f, ensure_ascii=False, indent=2)
+async def _check_google_file_active(uri_or_name: str, api_key: str = None) -> bool:
+ """Check if a Google file reference is still ACTIVE (not expired)."""
+ key = api_key or os.getenv("GOOGLE_API_KEY")
+ if not key:
+ return False
+ try:
+ client = genai.Client(api_key=key)
+ # Google file names look like "files/abc123", URIs like "https://..."
+ # files.get() needs the name, but we can extract it or just try
+ name = uri_or_name
+ if "://" in uri_or_name:
+ # Try to get by URI — extract the name from the path
+ # URI format: https://generativelanguage.googleapis.com/v1beta/files/abc123
+ parts = uri_or_name.rstrip("/").split("/")
+ name = f"files/{parts[-1]}" if parts else uri_or_name
+ info = await asyncio.to_thread(client.files.get, name=name)
+ state = str(getattr(info, "state", "")).upper()
+ return state.endswith("ACTIVE") or state == "ACTIVE"
+ except Exception as e:
+ logger.debug("Google file check failed for %s: %s", uri_or_name, e)
+ return False
+
+
async def prepare_attachments(
user: str,
target_provider: str,
@@ -882,20 +905,26 @@ async def prepare_attachments(
raise HTTPException(status_code=500, detail=f"OpenAI upload failed: {str(e)}")
elif provider_norm == ModelProvider.GOOGLE or provider_norm == "google":
- # Reuse cached google_file_uri
+ # Reuse cached google_file_uri — but verify it's still ACTIVE
cached_uri = meta.google_file_uri or (
meta.provider_file_id if meta.provider == "google" and meta.provider_file_id and "://" in meta.provider_file_id else None
)
if cached_uri:
- attachments.append({
- "provider": "google",
- "uri": cached_uri,
- "name": meta.name,
- "mime": meta.mime,
- })
- continue
+ still_valid = await _check_google_file_active(cached_uri, llm_config.api_key)
+ if still_valid:
+ attachments.append({
+ "provider": "google",
+ "uri": cached_uri,
+ "name": meta.name,
+ "mime": meta.mime,
+ })
+ continue
+ # Expired — clear cache, fall through to re-upload
+ logger.info("Google file expired, re-uploading: %s (%s)", meta.name, cached_uri)
+ meta.google_file_uri = None
+ changed = True
- # On-demand upload via shared helper
+ # On-demand upload (or re-upload after expiry)
uri = await ensure_google_file_upload(meta, path, llm_config.api_key)
if not uri:
raise HTTPException(status_code=500, detail=f"Google upload failed for {meta.name}")