diff options
| author | haoyuren <13851610112@163.com> | 2026-03-13 16:56:13 -0500 |
|---|---|---|
| committer | haoyuren <13851610112@163.com> | 2026-03-13 16:56:13 -0500 |
| commit | 2d268365bc1f9f9b550812a8aa406b7abc005497 (patch) | |
| tree | bbdd21c471e493573823af95ee651cc39297d82e /src | |
| parent | c309944494eb2de63bf9b35ea722d50b52e688a3 (diff) | |
Add bidirectional sync for binary files (PDF, images, etc.)
FileSyncBridge now handles binary fileRefs in addition to text docs:
- Downloads all binary files to temp dir on project connect
- Watches for binary file changes on disk and uploads via REST API
- Listens for Overleaf socket events (reciveNewFile, reciveNewDoc,
removeEntity, reciveEntityRename) to sync remote changes to disk
- Tracks binary files by SHA1 hash to avoid redundant uploads
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
Diffstat (limited to 'src')
| -rw-r--r-- | src/main/fileSyncBridge.ts | 497 |
| -rw-r--r-- | src/main/index.ts | 3 |
2 files changed, 457 insertions, 43 deletions
diff --git a/src/main/fileSyncBridge.ts b/src/main/fileSyncBridge.ts index 0bd24b0..e49b86d 100644 --- a/src/main/fileSyncBridge.ts +++ b/src/main/fileSyncBridge.ts @@ -1,12 +1,13 @@ // Copyright (c) 2026 Yuren Hao // Licensed under AGPL-3.0 - see LICENSE file -// Bidirectional file sync bridge: temp dir ↔ Overleaf via OT -import { join } from 'path' -import { readFile, writeFile, mkdir } from 'fs/promises' +// Bidirectional file sync bridge: temp dir ↔ Overleaf via OT (text) + REST (binary) +import { join, dirname } from 'path' +import { readFile, writeFile, mkdir, unlink, rename as fsRename } from 'fs/promises' import { createHash } from 'crypto' import * as chokidar from 'chokidar' import { diff_match_patch } from 'diff-match-patch' +import { net } from 'electron' import type { BrowserWindow } from 'electron' import type { OverleafSocket } from './overleafSocket' import { OtClient } from './otClient' @@ -16,7 +17,8 @@ import { isInsert, isDelete } from './otTypes' const dmp = new diff_match_patch() export class FileSyncBridge { - private lastKnownContent = new Map<string, string>() // relPath → content + private lastKnownContent = new Map<string, string>() // relPath → content (text docs) + private binaryHashes = new Map<string, string>() // relPath → sha1 hash (binary files) private writesInProgress = new Set<string>() // relPaths being written by bridge private debounceTimers = new Map<string, ReturnType<typeof setTimeout>>() private otClients = new Map<string, OtClient>() // docId → OtClient (non-editor docs) @@ -27,7 +29,12 @@ export class FileSyncBridge { private tmpDir: string private docPathMap: Record<string, string> // docId → relPath private pathDocMap: Record<string, string> // relPath → docId + private fileRefPathMap: Record<string, string> // fileRefId → relPath + private pathFileRefMap: Record<string, string> // relPath → fileRefId private mainWindow: BrowserWindow + private projectId: string + private cookie: string + private csrfToken: string 
private serverEventHandler: ((name: string, args: unknown[]) => void) | null = null private stopped = false @@ -37,13 +44,28 @@ export class FileSyncBridge { tmpDir: string, docPathMap: Record<string, string>, pathDocMap: Record<string, string>, - mainWindow: BrowserWindow + fileRefs: Array<{ id: string; path: string }>, + mainWindow: BrowserWindow, + projectId: string, + cookie: string, + csrfToken: string ) { this.socket = socket this.tmpDir = tmpDir this.docPathMap = docPathMap this.pathDocMap = pathDocMap this.mainWindow = mainWindow + this.projectId = projectId + this.cookie = cookie + this.csrfToken = csrfToken + + // Build fileRef maps + this.fileRefPathMap = {} + this.pathFileRefMap = {} + for (const ref of fileRefs) { + this.fileRefPathMap[ref.id] = ref.path + this.pathFileRefMap[ref.path] = ref.id + } } async start(): Promise<void> { @@ -73,28 +95,29 @@ export class FileSyncBridge { } } - // Listen for server events (remote ops on non-editor docs) + // Download all binary files + const fileRefIds = Object.keys(this.fileRefPathMap) + for (const fileRefId of fileRefIds) { + const relPath = this.fileRefPathMap[fileRefId] + try { + await this.downloadBinary(fileRefId, relPath) + } catch (e) { + console.log(`[FileSyncBridge] failed to download ${relPath}:`, e) + } + } + + // Listen for server events this.serverEventHandler = (name: string, args: unknown[]) => { if (name === 'otUpdateApplied') { - const update = args[0] as { doc?: string; op?: OtOp[]; v?: number } | undefined - if (!update?.doc) return - const docId = update.doc - - // For non-editor docs, process remote ops through bridge's OtClient - if (!this.editorDocs.has(docId) && update.op && update.v !== undefined) { - const otClient = this.otClients.get(docId) - if (otClient) { - otClient.onRemoteOps(update.op, update.v) - } - } - - // For non-editor docs, handle ack (op with no ops array = ack for our own op) - if (!this.editorDocs.has(docId) && !update.op) { - const otClient = 
this.otClients.get(docId) - if (otClient) { - otClient.onAck() - } - } + this.handleOtUpdate(args) + } else if (name === 'reciveNewFile') { + this.handleNewFile(args) + } else if (name === 'reciveNewDoc') { + this.handleNewDoc(args) + } else if (name === 'removeEntity') { + this.handleRemoveEntity(args) + } else if (name === 'reciveEntityRename') { + this.handleEntityRename(args) } } this.socket.on('serverEvent', this.serverEventHandler) @@ -105,7 +128,7 @@ export class FileSyncBridge { awaitWriteFinish: { stabilityThreshold: 100, pollInterval: 50 }, ignored: [ /(^|[/\\])\../, // dotfiles - /\.(aux|log|pdf|fls|fdb_latexmk|synctex\.gz|bbl|blg|out|toc|lof|lot|nav|snm|vrb)$/ // LaTeX output files + /\.(aux|log|fls|fdb_latexmk|synctex\.gz|bbl|blg|out|toc|lof|lot|nav|snm|vrb)$/ // LaTeX output files (not pdf!) ] }) @@ -116,13 +139,13 @@ export class FileSyncBridge { this.watcher.on('add', (absPath: string) => { const relPath = absPath.replace(this.tmpDir + '/', '') - // Only process if it's a known doc - if (this.pathDocMap[relPath]) { + // Process if it's a known doc or fileRef + if (this.pathDocMap[relPath] || this.pathFileRefMap[relPath]) { this.onFileChanged(relPath) } }) - console.log(`[FileSyncBridge] started, watching ${this.tmpDir}, ${docIds.length} docs synced`) + console.log(`[FileSyncBridge] started, watching ${this.tmpDir}, ${docIds.length} docs + ${fileRefIds.length} files synced`) } async stop(): Promise<void> { @@ -148,12 +171,210 @@ export class FileSyncBridge { this.otClients.clear() this.lastKnownContent.clear() + this.binaryHashes.clear() this.writesInProgress.clear() this.editorDocs.clear() console.log('[FileSyncBridge] stopped') } + // ── OT update handler ───────────────────────────────────── + + private handleOtUpdate(args: unknown[]): void { + const update = args[0] as { doc?: string; op?: OtOp[]; v?: number } | undefined + if (!update?.doc) return + const docId = update.doc + + // For non-editor docs, process remote ops through bridge's OtClient 
+ if (!this.editorDocs.has(docId) && update.op && update.v !== undefined) { + const otClient = this.otClients.get(docId) + if (otClient) { + otClient.onRemoteOps(update.op, update.v) + } + } + + // For non-editor docs, handle ack (op with no ops array = ack for our own op) + if (!this.editorDocs.has(docId) && !update.op) { + const otClient = this.otClients.get(docId) + if (otClient) { + otClient.onAck() + } + } + } + + // ── Binary file event handlers (socket) ──────────────────── + + /** Remote: new file added to project */ + private handleNewFile(args: unknown[]): void { + // args: [folderId, fileRef, source, linkedFileData, userId] + const folderId = args[0] as string + const fileRef = args[1] as { _id: string; name: string } | undefined + if (!fileRef?._id || !fileRef?.name) return + + // Determine relPath from folder + const folderPath = this.findFolderPath(folderId) + const relPath = folderPath + fileRef.name + + console.log(`[FileSyncBridge] remote new file: ${relPath} (${fileRef._id})`) + + // Register in maps + this.fileRefPathMap[fileRef._id] = relPath + this.pathFileRefMap[relPath] = fileRef._id + + // Download to disk + this.downloadBinary(fileRef._id, relPath).catch((e) => { + console.log(`[FileSyncBridge] failed to download new file ${relPath}:`, e) + }) + } + + /** Remote: new doc added to project */ + private handleNewDoc(args: unknown[]): void { + // args: [folderId, doc, source, userId] + const folderId = args[0] as string + const doc = args[1] as { _id: string; name: string } | undefined + if (!doc?._id || !doc?.name) return + + const folderPath = this.findFolderPath(folderId) + const relPath = folderPath + doc.name + + console.log(`[FileSyncBridge] remote new doc: ${relPath} (${doc._id})`) + + // Register in maps + this.docPathMap[doc._id] = relPath + this.pathDocMap[relPath] = doc._id + + // Join and sync the new doc + this.socket.joinDoc(doc._id).then((result) => { + const content = (result.docLines || []).join('\n') + 
this.lastKnownContent.set(relPath, content) + + const otClient = new OtClient( + result.version, + (ops, version) => this.sendOps(doc._id, ops, version), + (ops) => this.onRemoteApply(doc._id, ops) + ) + this.otClients.set(doc._id, otClient) + + this.writeToDisk(relPath, content) + }).catch((e) => { + console.log(`[FileSyncBridge] failed to join new doc ${relPath}:`, e) + }) + } + + /** Remote: entity removed */ + private handleRemoveEntity(args: unknown[]): void { + const entityId = args[0] as string + if (!entityId) return + + // Check if it's a doc + const docPath = this.docPathMap[entityId] + if (docPath) { + console.log(`[FileSyncBridge] remote remove doc: ${docPath}`) + delete this.docPathMap[entityId] + delete this.pathDocMap[docPath] + this.lastKnownContent.delete(docPath) + this.otClients.delete(entityId) + this.deleteFromDisk(docPath) + return + } + + // Check if it's a fileRef + const filePath = this.fileRefPathMap[entityId] + if (filePath) { + console.log(`[FileSyncBridge] remote remove file: ${filePath}`) + delete this.fileRefPathMap[entityId] + delete this.pathFileRefMap[filePath] + this.binaryHashes.delete(filePath) + this.deleteFromDisk(filePath) + } + } + + /** Remote: entity renamed */ + private handleEntityRename(args: unknown[]): void { + const entityId = args[0] as string + const newName = args[1] as string + if (!entityId || !newName) return + + // Check if it's a doc + const oldDocPath = this.docPathMap[entityId] + if (oldDocPath) { + const newPath = dirname(oldDocPath) === '.' ? 
newName : dirname(oldDocPath) + '/' + newName + console.log(`[FileSyncBridge] remote rename doc: ${oldDocPath} → ${newPath}`) + + // Update maps + this.docPathMap[entityId] = newPath + delete this.pathDocMap[oldDocPath] + this.pathDocMap[newPath] = entityId + + // Move content + const content = this.lastKnownContent.get(oldDocPath) + if (content !== undefined) { + this.lastKnownContent.delete(oldDocPath) + this.lastKnownContent.set(newPath, content) + } + + // Rename on disk + this.renameOnDisk(oldDocPath, newPath) + return + } + + // Check if it's a fileRef + const oldFilePath = this.fileRefPathMap[entityId] + if (oldFilePath) { + const newPath = dirname(oldFilePath) === '.' ? newName : dirname(oldFilePath) + '/' + newName + console.log(`[FileSyncBridge] remote rename file: ${oldFilePath} → ${newPath}`) + + // Update maps + this.fileRefPathMap[entityId] = newPath + delete this.pathFileRefMap[oldFilePath] + this.pathFileRefMap[newPath] = entityId + + // Move hash + const hash = this.binaryHashes.get(oldFilePath) + if (hash) { + this.binaryHashes.delete(oldFilePath) + this.binaryHashes.set(newPath, hash) + } + + // Rename on disk + this.renameOnDisk(oldFilePath, newPath) + } + } + + /** Find folder path prefix from folderId by looking at existing paths */ + private findFolderPath(folderId: string): string { + // Check doc paths to find a doc in this folder + for (const relPath of Object.values(this.docPathMap)) { + // Not a reliable method — fall back to root + } + // Check fileRef paths + for (const relPath of Object.values(this.fileRefPathMap)) { + // Not reliable either + } + // For root folder, return empty + // For subfolders, we'd need the folder tree — but we can look for folder paths + // ending with the folderId in the socket's project data + const projectData = this.socket.projectData + if (projectData) { + const path = this.findFolderPathInTree(projectData.project.rootFolder, folderId, '') + if (path !== null) return path + } + return '' // default to 
root + } + + private findFolderPathInTree(folders: Array<{ _id: string; name: string; folders?: unknown[] }>, targetId: string, prefix: string): string | null { + for (const f of folders) { + if (f._id === targetId) return prefix + const sub = f.folders as Array<{ _id: string; name: string; folders?: unknown[] }> | undefined + if (sub) { + const subPrefix = prefix ? prefix + f.name + '/' : f.name + '/' + const result = this.findFolderPathInTree(sub, targetId, subPrefix) + if (result !== null) return result + } + } + return null + } + // ── Disk change handler ────────────────────────────────────── private onFileChanged(relPath: string): void { @@ -175,9 +396,20 @@ export class FileSyncBridge { private async processChange(relPath: string): Promise<void> { if (this.stopped) return + // Text doc? const docId = this.pathDocMap[relPath] - if (!docId) return + if (docId) { + return this.processDocChange(relPath, docId) + } + + // Binary fileRef? + const fileRefId = this.pathFileRefMap[relPath] + if (fileRefId) { + return this.processBinaryChange(relPath, fileRefId) + } + } + private async processDocChange(relPath: string, docId: string): Promise<void> { let newContent: string try { newContent = await readFile(join(this.tmpDir, relPath), 'utf-8') @@ -214,6 +446,168 @@ export class FileSyncBridge { } } + private async processBinaryChange(relPath: string, fileRefId: string): Promise<void> { + const fullPath = join(this.tmpDir, relPath) + + let fileData: Buffer + try { + fileData = await readFile(fullPath) + } catch { + return // file deleted or unreadable + } + + // Layer 2: Hash equality check + const newHash = createHash('sha1').update(fileData).digest('hex') + const oldHash = this.binaryHashes.get(relPath) + if (newHash === oldHash) return + + console.log(`[FileSyncBridge] binary change detected: ${relPath} (${fileData.length} bytes)`) + this.binaryHashes.set(relPath, newHash) + + // Upload to Overleaf via REST API (this replaces the existing file) + try { + await 
this.uploadBinary(relPath, fileData) + } catch (e) { + console.log(`[FileSyncBridge] failed to upload binary ${relPath}:`, e) + } + } + + // ── Binary file download/upload ──────────────────────────── + + private async downloadBinary(fileRefId: string, relPath: string): Promise<void> { + const fullPath = join(this.tmpDir, relPath) + const dir = dirname(fullPath) + await mkdir(dir, { recursive: true }) + + return new Promise((resolve, reject) => { + const url = `https://www.overleaf.com/project/${this.projectId}/file/${fileRefId}` + const req = net.request(url) + req.setHeader('Cookie', this.cookie) + req.setHeader('User-Agent', 'Mozilla/5.0') + + const chunks: Buffer[] = [] + req.on('response', (res) => { + res.on('data', (chunk) => chunks.push(chunk as Buffer)) + res.on('end', async () => { + try { + const data = Buffer.concat(chunks) + // Set write guard before writing + this.writesInProgress.add(relPath) + await writeFile(fullPath, data) + setTimeout(() => this.writesInProgress.delete(relPath), 150) + + // Store hash + this.binaryHashes.set(relPath, createHash('sha1').update(data).digest('hex')) + resolve() + } catch (e) { + reject(e) + } + }) + }) + req.on('error', reject) + req.end() + }) + } + + private async uploadBinary(relPath: string, fileData: Buffer): Promise<void> { + const fileName = relPath.includes('/') ? relPath.split('/').pop()! 
: relPath + const folderId = this.findFolderIdForPath(relPath) + + const ext = fileName.split('.').pop()?.toLowerCase() || '' + const mimeMap: Record<string, string> = { + png: 'image/png', jpg: 'image/jpeg', jpeg: 'image/jpeg', gif: 'image/gif', + svg: 'image/svg+xml', pdf: 'application/pdf', eps: 'application/postscript', + zip: 'application/zip', bmp: 'image/bmp', tiff: 'image/tiff', + } + const mime = mimeMap[ext] || 'application/octet-stream' + const boundary = '----FormBoundary' + Math.random().toString(36).slice(2) + + const parts: Buffer[] = [] + parts.push(Buffer.from(`--${boundary}\r\nContent-Disposition: form-data; name="name"\r\n\r\n${fileName}\r\n`)) + parts.push(Buffer.from(`--${boundary}\r\nContent-Disposition: form-data; name="type"\r\n\r\n${mime}\r\n`)) + parts.push(Buffer.from(`--${boundary}\r\nContent-Disposition: form-data; name="qqfile"; filename="${fileName}"\r\nContent-Type: ${mime}\r\n\r\n`)) + parts.push(fileData) + parts.push(Buffer.from(`\r\n--${boundary}--\r\n`)) + + const body = Buffer.concat(parts) + + return new Promise((resolve, reject) => { + const req = net.request({ + method: 'POST', + url: `https://www.overleaf.com/project/${this.projectId}/upload?folder_id=${folderId}` + }) + req.setHeader('Cookie', this.cookie) + req.setHeader('Content-Type', `multipart/form-data; boundary=${boundary}`) + req.setHeader('User-Agent', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36') + req.setHeader('Accept', 'application/json') + req.setHeader('Referer', `https://www.overleaf.com/project/${this.projectId}`) + req.setHeader('Origin', 'https://www.overleaf.com') + + if (this.csrfToken) req.setHeader('x-csrf-token', this.csrfToken) + + let resBody = '' + req.on('response', (res) => { + res.on('data', (chunk: Buffer) => { resBody += chunk.toString() }) + res.on('end', () => { + console.log(`[FileSyncBridge] upload ${relPath}: ${res.statusCode} ${resBody.slice(0, 200)}`) + try { + const data = JSON.parse(resBody) + if 
(data.success !== false && !data.error) { + // Upload replaces the file — update our fileRef ID if it changed + if (data.entity_id && data.entity_id !== this.pathFileRefMap[relPath]) { + const oldId = this.pathFileRefMap[relPath] + if (oldId) delete this.fileRefPathMap[oldId] + this.fileRefPathMap[data.entity_id] = relPath + this.pathFileRefMap[relPath] = data.entity_id + } + resolve() + } else { + reject(new Error(data.error || 'Upload failed')) + } + } catch { + reject(new Error(`HTTP ${res.statusCode}: ${resBody.slice(0, 200)}`)) + } + }) + }) + req.on('error', reject) + req.write(body) + req.end() + }) + } + + /** Find the folder ID for a given relPath */ + private findFolderIdForPath(relPath: string): string { + const dir = dirname(relPath) + if (dir === '.') { + // Root folder + const projectData = this.socket.projectData + return projectData?.project.rootFolder?.[0]?._id || '' + } + + // Search project data for the folder + const projectData = this.socket.projectData + if (projectData) { + const folderId = this.findFolderIdInTree(projectData.project.rootFolder, dir + '/', '') + if (folderId) return folderId + } + + // Fallback to root + return projectData?.project.rootFolder?.[0]?._id || '' + } + + private findFolderIdInTree(folders: Array<{ _id: string; name: string; folders?: unknown[] }>, targetPath: string, prefix: string): string | null { + for (const f of folders) { + const currentPath = prefix ? 
prefix + f.name + '/' : f.name + '/' + if (currentPath === targetPath) return f._id + const sub = f.folders as Array<{ _id: string; name: string; folders?: unknown[] }> | undefined + if (sub) { + const result = this.findFolderIdInTree(sub, targetPath, currentPath) + if (result) return result + } + } + return null + } + // ── Send OT ops to Overleaf (for non-editor docs) ─────────── private sendOps(docId: string, ops: OtOp[], version: number): void { @@ -254,15 +648,12 @@ export class FileSyncBridge { /** Renderer opened this doc in the editor — bridge stops owning OT */ addEditorDoc(docId: string): void { this.editorDocs.add(docId) - // Bridge's OtClient for this doc is no longer used (renderer has its own) - // But we keep the doc joined in the socket } /** Renderer closed this doc from the editor — bridge takes over OT */ removeEditorDoc(docId: string): void { this.editorDocs.delete(docId) - // Re-join the doc to get fresh version, since renderer's OtClient was tracking it const relPath = this.docPathMap[docId] if (!relPath) return @@ -270,7 +661,6 @@ export class FileSyncBridge { const content = (result.docLines || []).join('\n') this.lastKnownContent.set(relPath, content) - // Create fresh OtClient with current version const otClient = new OtClient( result.version, (ops, version) => this.sendOps(docId, ops, version), @@ -278,7 +668,6 @@ export class FileSyncBridge { ) this.otClients.set(docId, otClient) - // Write latest content to disk this.writeToDisk(relPath, content) }).catch((e) => { console.log(`[FileSyncBridge] failed to re-join doc ${relPath}:`, e) @@ -289,9 +678,8 @@ export class FileSyncBridge { private async writeToDisk(relPath: string, content: string): Promise<void> { const fullPath = join(this.tmpDir, relPath) - const dir = fullPath.substring(0, fullPath.lastIndexOf('/')) + const dir = dirname(fullPath) - // Set write guard this.writesInProgress.add(relPath) try { @@ -301,12 +689,42 @@ export class FileSyncBridge { console.log(`[FileSyncBridge] 
write error for ${relPath}:`, e) } - // Clear write guard after 150ms (chokidar needs time to fire & be ignored) setTimeout(() => { this.writesInProgress.delete(relPath) }, 150) } + private async deleteFromDisk(relPath: string): Promise<void> { + const fullPath = join(this.tmpDir, relPath) + this.writesInProgress.add(relPath) + try { + await unlink(fullPath) + } catch { /* file may not exist */ } + setTimeout(() => { + this.writesInProgress.delete(relPath) + }, 150) + } + + private async renameOnDisk(oldRelPath: string, newRelPath: string): Promise<void> { + const oldFull = join(this.tmpDir, oldRelPath) + const newFull = join(this.tmpDir, newRelPath) + + this.writesInProgress.add(oldRelPath) + this.writesInProgress.add(newRelPath) + + try { + await mkdir(dirname(newFull), { recursive: true }) + await fsRename(oldFull, newFull) + } catch (e) { + console.log(`[FileSyncBridge] rename error ${oldRelPath} → ${newRelPath}:`, e) + } + + setTimeout(() => { + this.writesInProgress.delete(oldRelPath) + this.writesInProgress.delete(newRelPath) + }, 150) + } + /** Get the temp dir path */ get dir(): string { return this.tmpDir @@ -341,7 +759,6 @@ function diffsToOtOps(diffs: [number, string][]): OtOp[] { break case -1: // DIFF_DELETE ops.push({ d: text, p: pos }) - // Don't advance pos — deletion doesn't move cursor forward break } } @@ -351,7 +768,6 @@ function diffsToOtOps(diffs: [number, string][]): OtOp[] { /** Apply OT ops to a text string */ function applyOpsToText(text: string, ops: OtOp[]): string { - // Sort ops by position descending so we can apply without position shifting const sortedOps = [...ops].sort((a, b) => b.p - a.p) for (const op of sortedOps) { @@ -360,7 +776,6 @@ function applyOpsToText(text: string, ops: OtOp[]): string { } else if (isDelete(op)) { text = text.slice(0, op.p) + text.slice(op.p + op.d.length) } - // Comment ops don't modify text } return text diff --git a/src/main/index.ts b/src/main/index.ts index 7d70238..0d93b17 100644 --- 
a/src/main/index.ts +++ b/src/main/index.ts @@ -4,7 +4,6 @@ import { app, BrowserWindow, ipcMain, dialog, shell, net } from 'electron' import { join, basename } from 'path' import { readFile, writeFile } from 'fs/promises' -import { createReadStream } from 'fs' import { spawn } from 'child_process' import * as pty from 'node-pty' import { OverleafSocket, type RootFolder, type SubFolder, type JoinDocResult } from './overleafSocket' @@ -650,7 +649,7 @@ ipcMain.handle('ot:connect', async (_e, projectId: string) => { // Set up file sync bridge for bidirectional sync const tmpDir = compilationManager.dir - fileSyncBridge = new FileSyncBridge(overleafSock, tmpDir, docPathMap, pathDocMap, mainWindow!) + fileSyncBridge = new FileSyncBridge(overleafSock, tmpDir, docPathMap, pathDocMap, fileRefs, mainWindow!, projectId, overleafSessionCookie, overleafCsrfToken) fileSyncBridge.start().catch((e) => { console.log('[ot:connect] fileSyncBridge start error:', e) }) |
