summaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
authorhaoyuren <13851610112@163.com>2026-04-25 17:21:27 -0300
committerhaoyuren <13851610112@163.com>2026-04-25 17:21:27 -0300
commit59cb73960ee68a47adbdc05915847cb8d45f795e (patch)
tree1e9a0b0cec3f68ab9d283898232e7408643e7ac1 /src
parent9b5256718c2117511f0253a656bb8cff7410b92a (diff)
Fix Overleaf file tree sync updates
Diffstat (limited to 'src')
-rw-r--r--src/main/fileSyncBridge.ts336
-rw-r--r--src/main/index.ts77
-rw-r--r--src/main/otClient.ts46
-rw-r--r--src/preload/index.ts65
-rw-r--r--src/renderer/src/App.tsx22
-rw-r--r--src/renderer/src/components/Editor.tsx1
-rw-r--r--src/renderer/src/components/FileTree.tsx20
-rw-r--r--src/renderer/src/ot/cmAdapter.ts21
-rw-r--r--src/renderer/src/ot/otClient.ts52
-rw-r--r--src/renderer/src/ot/overleafSync.ts45
-rw-r--r--src/renderer/src/utils/projectEntitySync.ts423
11 files changed, 982 insertions, 126 deletions
diff --git a/src/main/fileSyncBridge.ts b/src/main/fileSyncBridge.ts
index 707d3d8..59be86f 100644
--- a/src/main/fileSyncBridge.ts
+++ b/src/main/fileSyncBridge.ts
@@ -3,7 +3,7 @@
// Bidirectional file sync bridge: temp dir ↔ Overleaf via OT (text) + REST (binary)
import { join, dirname } from 'path'
-import { readFile, writeFile, mkdir, unlink, rename as fsRename, appendFile, readdir } from 'fs/promises'
+import { readFile, writeFile, mkdir, unlink, rename as fsRename, appendFile, readdir, rm } from 'fs/promises'
import { createHash } from 'crypto'
import * as chokidar from 'chokidar'
import { diff_match_patch } from 'diff-match-patch'
@@ -53,6 +53,8 @@ export class FileSyncBridge {
private pathDocMap: Record<string, string> // relPath → docId
private fileRefPathMap: Record<string, string> // fileRefId → relPath
private pathFileRefMap: Record<string, string> // relPath → fileRefId
+ private folderPathMap: Record<string, string> // folderId → relDirPath (no trailing slash)
+ private pathFolderMap: Record<string, string> // relDirPath (no trailing slash) → folderId
private mainWindow: BrowserWindow
private projectId: string
private cookie: string
@@ -89,6 +91,10 @@ export class FileSyncBridge {
this.fileRefPathMap[ref.id] = ref.path
this.pathFileRefMap[ref.path] = ref.id
}
+
+ this.folderPathMap = {}
+ this.pathFolderMap = {}
+ this.rebuildFolderMaps()
}
async start(): Promise<void> {
@@ -122,10 +128,14 @@ export class FileSyncBridge {
this.handleNewFile(args)
} else if (name === 'reciveNewDoc') {
this.handleNewDoc(args)
+ } else if (name === 'reciveNewFolder') {
+ this.handleNewFolder(args)
} else if (name === 'removeEntity') {
this.handleRemoveEntity(args)
} else if (name === 'reciveEntityRename') {
this.handleEntityRename(args)
+ } else if (name === 'reciveEntityMove') {
+ this.handleEntityMove(args)
}
}
this.socket.on('serverEvent', this.serverEventHandler)
@@ -405,6 +415,7 @@ export class FileSyncBridge {
// Register in maps
this.fileRefPathMap[fileRef._id] = relPath
this.pathFileRefMap[relPath] = fileRef._id
+ this.notifyEntityCreated('file', fileRef._id, relPath, fileRef.name, folderId)
// Download to disk
this.downloadBinary(fileRef._id, relPath).catch((e) => {
@@ -433,6 +444,7 @@ export class FileSyncBridge {
// Register in maps
this.docPathMap[doc._id] = relPath
this.pathDocMap[relPath] = doc._id
+ this.notifyEntityCreated('doc', doc._id, relPath, doc.name, folderId)
// Join and sync the new doc
this.socket.joinDoc(doc._id).then((result) => {
@@ -452,6 +464,24 @@ export class FileSyncBridge {
})
}
+ /** Remote: new folder added to project */
+ private handleNewFolder(args: unknown[]): void {
+ // args: [parentFolderId, folder, userId]
+ const parentFolderId = args[0] as string
+ const folder = args[1] as { _id: string; name: string } | undefined
+ if (!folder?._id || !folder?.name) return
+
+ const parentPath = this.folderPathMap[parentFolderId] ?? ''
+ const relPath = parentPath ? `${parentPath}/${folder.name}` : folder.name
+
+ bridgeLog(`[FileSyncBridge] remote new folder: ${relPath} (${folder._id})`)
+
+ this.folderPathMap[folder._id] = relPath
+ this.pathFolderMap[relPath] = folder._id
+ this.createdFolders.set(relPath, folder._id)
+ this.notifyEntityCreated('folder', folder._id, relPath, folder.name, parentFolderId)
+ }
+
/** Remote: entity removed */
private handleRemoveEntity(args: unknown[]): void {
const entityId = args[0] as string
@@ -465,6 +495,7 @@ export class FileSyncBridge {
delete this.pathDocMap[docPath]
this.lastKnownContent.delete(docPath)
this.otClients.delete(entityId)
+ this.notifyEntityRemoved('doc', entityId, docPath)
this.deleteFromDisk(docPath)
return
}
@@ -476,7 +507,18 @@ export class FileSyncBridge {
delete this.fileRefPathMap[entityId]
delete this.pathFileRefMap[filePath]
this.binaryHashes.delete(filePath)
+ this.notifyEntityRemoved('file', entityId, filePath)
this.deleteFromDisk(filePath)
+ return
+ }
+
+ // Check if it's a folder
+ const folderPath = this.folderPathMap[entityId]
+ if (folderPath !== undefined) {
+ bridgeLog(`[FileSyncBridge] remote remove folder: ${folderPath}`)
+ this.removeFolderMappings(entityId)
+ this.notifyEntityRemoved('folder', entityId, folderPath)
+ this.deleteDirFromDisk(folderPath)
}
}
@@ -505,6 +547,7 @@ export class FileSyncBridge {
}
// Rename on disk
+ this.notifyEntityRenamed('doc', entityId, oldDocPath, newPath, newName)
this.renameOnDisk(oldDocPath, newPath)
return
}
@@ -528,12 +571,87 @@ export class FileSyncBridge {
}
// Rename on disk
+ this.notifyEntityRenamed('file', entityId, oldFilePath, newPath, newName)
this.renameOnDisk(oldFilePath, newPath)
+ return
+ }
+
+ // Check if it's a folder
+ const oldFolderPath = this.folderPathMap[entityId]
+ if (oldFolderPath !== undefined) {
+ const parent = dirname(oldFolderPath)
+ const newPath = parent === '.' ? newName : parent + '/' + newName
+ bridgeLog(`[FileSyncBridge] remote rename folder: ${oldFolderPath} → ${newPath}`)
+
+ this.rewriteFolderPath(entityId, newPath)
+ this.notifyEntityRenamed('folder', entityId, oldFolderPath, newPath, newName)
+ this.renameOnDisk(oldFolderPath, newPath)
+ }
+ }
+
+ /** Remote: entity moved */
+ private handleEntityMove(args: unknown[]): void {
+ const entityId = args[0] as string
+ const toFolderId = args[1] as string
+ if (!entityId || !toFolderId) return
+
+ const parentPath = this.folderPathMap[toFolderId] ?? ''
+ const buildNewPath = (oldPath: string) => {
+ const name = oldPath.split('/').filter(Boolean).pop() || oldPath
+ return parentPath ? `${parentPath}/${name}` : name
+ }
+
+ const oldDocPath = this.docPathMap[entityId]
+ if (oldDocPath) {
+ const newPath = buildNewPath(oldDocPath)
+ bridgeLog(`[FileSyncBridge] remote move doc: ${oldDocPath} → ${newPath}`)
+ this.docPathMap[entityId] = newPath
+ delete this.pathDocMap[oldDocPath]
+ this.pathDocMap[newPath] = entityId
+ const content = this.lastKnownContent.get(oldDocPath)
+ if (content !== undefined) {
+ this.lastKnownContent.delete(oldDocPath)
+ this.lastKnownContent.set(newPath, content)
+ }
+ this.notifyEntityMoved('doc', entityId, oldDocPath, newPath, toFolderId)
+ this.renameOnDisk(oldDocPath, newPath)
+ return
+ }
+
+ const oldFilePath = this.fileRefPathMap[entityId]
+ if (oldFilePath) {
+ const newPath = buildNewPath(oldFilePath)
+ bridgeLog(`[FileSyncBridge] remote move file: ${oldFilePath} → ${newPath}`)
+ this.fileRefPathMap[entityId] = newPath
+ delete this.pathFileRefMap[oldFilePath]
+ this.pathFileRefMap[newPath] = entityId
+ const hash = this.binaryHashes.get(oldFilePath)
+ if (hash) {
+ this.binaryHashes.delete(oldFilePath)
+ this.binaryHashes.set(newPath, hash)
+ }
+ this.notifyEntityMoved('file', entityId, oldFilePath, newPath, toFolderId)
+ this.renameOnDisk(oldFilePath, newPath)
+ return
+ }
+
+ const oldFolderPath = this.folderPathMap[entityId]
+ if (oldFolderPath !== undefined) {
+ const newPath = buildNewPath(oldFolderPath)
+ bridgeLog(`[FileSyncBridge] remote move folder: ${oldFolderPath} → ${newPath}`)
+ this.rewriteFolderPath(entityId, newPath)
+ this.notifyEntityMoved('folder', entityId, oldFolderPath, newPath, toFolderId)
+ this.renameOnDisk(oldFolderPath, newPath)
}
}
/** Find folder path prefix from folderId */
private findFolderPath(folderId: string): string {
+ if (folderId in this.folderPathMap) {
+ const path = this.folderPathMap[folderId]
+ return path ? `${path}/` : ''
+ }
+
const projectData = this.socket.projectData
if (projectData) {
const rootFolder = projectData.project.rootFolder?.[0]
@@ -722,7 +840,7 @@ export class FileSyncBridge {
})
}
- private async uploadBinary(relPath: string, fileData: Buffer, overrideFolderId?: string): Promise<void> {
+ private async uploadBinary(relPath: string, fileData: Buffer, overrideFolderId?: string): Promise<string | undefined> {
const fileName = relPath.includes('/') ? relPath.split('/').pop()! : relPath
const folderId = overrideFolderId || this.findFolderIdForPath(relPath)
@@ -767,13 +885,14 @@ export class FileSyncBridge {
const data = JSON.parse(resBody)
if (data.success !== false && !data.error) {
// Upload replaces the file — update our fileRef ID if it changed
- if (data.entity_id && data.entity_id !== this.pathFileRefMap[relPath]) {
+ const entityId = data.entity_id || data.entityId || data.fileRef?._id || data.file?._id
+ if (entityId && entityId !== this.pathFileRefMap[relPath]) {
const oldId = this.pathFileRefMap[relPath]
if (oldId) delete this.fileRefPathMap[oldId]
- this.fileRefPathMap[data.entity_id] = relPath
- this.pathFileRefMap[relPath] = data.entity_id
+ this.fileRefPathMap[entityId] = relPath
+ this.pathFileRefMap[relPath] = entityId
}
- resolve()
+ resolve(entityId)
} else {
reject(new Error(data.error || 'Upload failed'))
}
@@ -793,6 +912,10 @@ export class FileSyncBridge {
const projectData = this.socket.projectData
const rootId = projectData?.project.rootFolder?.[0]?._id || ''
const dir = dirname(relPath)
+ const normalizedDir = dir === '.' ? '' : dir
+ if (normalizedDir in this.pathFolderMap) return this.pathFolderMap[normalizedDir]
+ const cached = this.createdFolders.get(normalizedDir)
+ if (cached) return cached
if (dir === '.') return rootId
// Search inside root folder's children (skip root folder name)
@@ -820,6 +943,162 @@ export class FileSyncBridge {
return null
}
+ private rebuildFolderMaps(): void {
+ this.folderPathMap = {}
+ this.pathFolderMap = {}
+
+ const rootFolder = this.socket.projectData?.project.rootFolder?.[0]
+ if (!rootFolder) return
+
+ this.folderPathMap[rootFolder._id] = ''
+ this.pathFolderMap[''] = rootFolder._id
+
+ const walk = (folders: Array<{ _id: string; name: string; folders?: unknown[] }>, prefix: string) => {
+ for (const folder of folders) {
+ const relPath = prefix ? `${prefix}/${folder.name}` : folder.name
+ this.folderPathMap[folder._id] = relPath
+ this.pathFolderMap[relPath] = folder._id
+ const children = folder.folders as Array<{ _id: string; name: string; folders?: unknown[] }> | undefined
+ if (children) walk(children, relPath)
+ }
+ }
+
+ const children = rootFolder.folders as Array<{ _id: string; name: string; folders?: unknown[] }> | undefined
+ if (children) walk(children, '')
+ }
+
+ private removeFolderMappings(folderId: string): void {
+ const folderPath = this.folderPathMap[folderId]
+ if (folderPath === undefined) return
+ const prefix = folderPath ? `${folderPath}/` : ''
+
+ for (const [docId, relPath] of Object.entries(this.docPathMap)) {
+ if (prefix && relPath.startsWith(prefix)) {
+ delete this.docPathMap[docId]
+ delete this.pathDocMap[relPath]
+ this.lastKnownContent.delete(relPath)
+ this.otClients.delete(docId)
+ }
+ }
+
+ for (const [fileRefId, relPath] of Object.entries(this.fileRefPathMap)) {
+ if (prefix && relPath.startsWith(prefix)) {
+ delete this.fileRefPathMap[fileRefId]
+ delete this.pathFileRefMap[relPath]
+ this.binaryHashes.delete(relPath)
+ }
+ }
+
+ for (const [id, relPath] of Object.entries(this.folderPathMap)) {
+ if (id === folderId || (prefix && relPath.startsWith(prefix))) {
+ delete this.folderPathMap[id]
+ delete this.pathFolderMap[relPath]
+ this.createdFolders.delete(relPath)
+ }
+ }
+ }
+
+ private rewriteFolderPath(folderId: string, newFolderPath: string): void {
+ const oldFolderPath = this.folderPathMap[folderId]
+ if (oldFolderPath === undefined) return
+
+ const oldPrefix = oldFolderPath ? `${oldFolderPath}/` : ''
+ const newPrefix = newFolderPath ? `${newFolderPath}/` : ''
+
+ for (const [docId, relPath] of Object.entries(this.docPathMap)) {
+ if (oldPrefix && relPath.startsWith(oldPrefix)) {
+ const newPath = newPrefix + relPath.slice(oldPrefix.length)
+ this.docPathMap[docId] = newPath
+ delete this.pathDocMap[relPath]
+ this.pathDocMap[newPath] = docId
+ const content = this.lastKnownContent.get(relPath)
+ if (content !== undefined) {
+ this.lastKnownContent.delete(relPath)
+ this.lastKnownContent.set(newPath, content)
+ }
+ }
+ }
+
+ for (const [fileRefId, relPath] of Object.entries(this.fileRefPathMap)) {
+ if (oldPrefix && relPath.startsWith(oldPrefix)) {
+ const newPath = newPrefix + relPath.slice(oldPrefix.length)
+ this.fileRefPathMap[fileRefId] = newPath
+ delete this.pathFileRefMap[relPath]
+ this.pathFileRefMap[newPath] = fileRefId
+ const hash = this.binaryHashes.get(relPath)
+ if (hash) {
+ this.binaryHashes.delete(relPath)
+ this.binaryHashes.set(newPath, hash)
+ }
+ }
+ }
+
+ for (const [id, relPath] of Object.entries(this.folderPathMap)) {
+ if (id === folderId || (oldPrefix && relPath.startsWith(oldPrefix))) {
+ const nextPath = id === folderId
+ ? newFolderPath
+ : newPrefix + relPath.slice(oldPrefix.length)
+ delete this.pathFolderMap[relPath]
+ this.folderPathMap[id] = nextPath
+ this.pathFolderMap[nextPath] = id
+ this.createdFolders.delete(relPath)
+ this.createdFolders.set(nextPath, id)
+ }
+ }
+ }
+
+ private notifyEntityCreated(
+ kind: 'doc' | 'file' | 'folder',
+ entityId: string,
+ relPath: string,
+ name: string,
+ parentFolderId?: string
+ ): void {
+ this.mainWindow.webContents.send('sync:entityCreated', {
+ kind,
+ entityId,
+ relPath,
+ name,
+ parentFolderId
+ })
+ }
+
+ private notifyEntityRemoved(kind: 'doc' | 'file' | 'folder', entityId: string, relPath: string): void {
+ this.mainWindow.webContents.send('sync:entityRemoved', { kind, entityId, relPath })
+ }
+
+ private notifyEntityRenamed(
+ kind: 'doc' | 'file' | 'folder',
+ entityId: string,
+ oldPath: string,
+ newPath: string,
+ newName: string
+ ): void {
+ this.mainWindow.webContents.send('sync:entityRenamed', {
+ kind,
+ entityId,
+ oldPath,
+ newPath,
+ newName
+ })
+ }
+
+ private notifyEntityMoved(
+ kind: 'doc' | 'file' | 'folder',
+ entityId: string,
+ oldPath: string,
+ newPath: string,
+ parentFolderId: string
+ ): void {
+ this.mainWindow.webContents.send('sync:entityMoved', {
+ kind,
+ entityId,
+ oldPath,
+ newPath,
+ parentFolderId
+ })
+ }
+
// ── Send OT ops to Overleaf (for non-editor docs) ───────────
private sendOps(docId: string, ops: OtOp[], version: number): void {
@@ -957,6 +1236,17 @@ export class FileSyncBridge {
}, 150)
}
+ private async deleteDirFromDisk(relPath: string): Promise<void> {
+ const fullPath = join(this.tmpDir, relPath)
+ this.writesInProgress.add(relPath)
+ try {
+ await rm(fullPath, { recursive: true, force: true })
+ } catch { /* directory may not exist */ }
+ setTimeout(() => {
+ this.writesInProgress.delete(relPath)
+ }, 150)
+ }
+
private async renameOnDisk(oldRelPath: string, newRelPath: string): Promise<void> {
const oldFull = join(this.tmpDir, oldRelPath)
const newFull = join(this.tmpDir, newRelPath)
@@ -1112,8 +1402,9 @@ export class FileSyncBridge {
this.lastKnownContent.set(relPath, serverContent)
}
- // Notify renderer about the new doc
- this.mainWindow.webContents.send('sync:newDoc', { docId, relPath })
+ // Notify renderer about the new doc. The server will also echo
+ // reciveNewDoc, but pendingCreates makes us skip that duplicate.
+ this.notifyEntityCreated('doc', docId, relPath, fileName, folderId)
}
/** Upload a new binary file to Overleaf */
@@ -1125,19 +1416,26 @@ export class FileSyncBridge {
const folderId = await this.ensureFolderExists(dir === '.' ? '' : dir)
bridgeLog(`[FileSyncBridge] uploading new binary: ${relPath} (${fileData.length} bytes)`)
- await this.uploadBinary(relPath, fileData, folderId)
+ const fileRefId = await this.uploadBinary(relPath, fileData, folderId)
this.binaryHashes.set(relPath, createHash('sha1').update(fileData).digest('hex'))
// Notify renderer
- this.mainWindow.webContents.send('sync:newDoc', { docId: null, relPath })
+ if (fileRefId) {
+ this.notifyEntityCreated('file', fileRefId, relPath, relPath.split('/').pop() || relPath, folderId)
+ }
}
/** Ensure a folder path exists on Overleaf, creating intermediaries as needed */
private async ensureFolderExists(dirPath: string): Promise<string> {
+ dirPath = dirPath.replace(/\/+$/, '')
+
if (!dirPath || dirPath === '.') {
return this.socket.projectData?.project.rootFolder?.[0]?._id || ''
}
+ const known = this.pathFolderMap[dirPath]
+ if (known) return known
+
// Check cache
const cached = this.createdFolders.get(dirPath)
if (cached) return cached
@@ -1170,6 +1468,8 @@ export class FileSyncBridge {
if (result.ok && result.data?._id) {
const folderId = result.data._id as string
this.createdFolders.set(dirPath, folderId)
+ this.folderPathMap[folderId] = dirPath
+ this.pathFolderMap[dirPath] = folderId
bridgeLog(`[FileSyncBridge] created folder "${folderName}" (${folderId})`)
return folderId
}
@@ -1219,6 +1519,16 @@ export class FileSyncBridge {
return this.lastKnownContent.get(relPath)
}
+ /** Get all synced text docs (used by compilation manager) */
+ getAllDocContents(): Array<{ path: string; content: string }> {
+ return Array.from(this.lastKnownContent.entries()).map(([path, content]) => ({ path, content }))
+ }
+
+ /** Get all known binary file refs (used by compilation manager) */
+ getFileRefs(): Array<{ id: string; path: string }> {
+ return Object.entries(this.fileRefPathMap).map(([id, path]) => ({ id, path }))
+ }
+
/** Check if a doc's content is known */
hasDoc(relPath: string): boolean {
return this.lastKnownContent.has(relPath)
@@ -1252,9 +1562,9 @@ function diffsToOtOps(diffs: [number, string][]): OtOp[] {
/** Apply OT ops to a text string */
function applyOpsToText(text: string, ops: OtOp[]): string {
- const sortedOps = [...ops].sort((a, b) => b.p - a.p)
-
- for (const op of sortedOps) {
+ // ShareJS text operation components are sequential. Each component's
+ // position is relative to the document after earlier components ran.
+ for (const op of ops) {
if (isInsert(op)) {
text = text.slice(0, op.p) + op.i + text.slice(op.p)
} else if (isDelete(op)) {
diff --git a/src/main/index.ts b/src/main/index.ts
index d2c3d64..7c26a6c 100644
--- a/src/main/index.ts
+++ b/src/main/index.ts
@@ -752,12 +752,13 @@ ipcMain.handle('ot:connect', async (_e, projectId: string) => {
sendToRenderer('ot:connectionState', state)
})
- // otUpdateApplied: server acknowledges our op (ack signal for OT client)
- // Only ack when there's no 'op' field — presence of 'op' means it's a remote update, not our ack
+ // otUpdateApplied: server acknowledges our op with a no-op update on
+ // official Overleaf, but some deployments echo own-source ops instead.
overleafSock.on('serverEvent', (name: string, args: unknown[]) => {
if (name === 'otUpdateApplied') {
- const update = args[0] as { doc?: string; op?: unknown[]; v?: number } | undefined
- if (update?.doc && !update.op) {
+ const update = args[0] as { doc?: string; op?: unknown[]; v?: number; meta?: { source?: string } } | undefined
+ const isOwnSource = update?.meta?.source && update.meta.source === overleafSock?.publicId
+ if (update?.doc && (!update.op || isOwnSource)) {
sendToRenderer('ot:ack', { docId: update.doc })
}
} else if (name === 'otUpdateError') {
@@ -1063,6 +1064,34 @@ ipcMain.handle('ot:disconnect', async () => {
// Track per-doc event handlers for cleanup on leaveDoc
const docEventHandlers = new Map<string, (name: string, args: unknown[]) => void>()
+function attachRendererDoc(docId: string): void {
+ if (!overleafSock) return
+
+ // Notify bridge that editor is taking over this doc
+ fileSyncBridge?.addEditorDoc(docId)
+
+ // Remove existing handler if re-attaching
+ const existingHandler = docEventHandlers.get(docId)
+ if (existingHandler) overleafSock.removeListener('serverEvent', existingHandler)
+
+ // Set up relay for remote ops on this doc
+ const handler = (name: string, args: unknown[]) => {
+ if (name === 'otUpdateApplied') {
+ const update = args[0] as { doc?: string; op?: unknown[]; v?: number; meta?: { source?: string } } | undefined
+ const isOwnSource = update?.meta?.source && update.meta.source === overleafSock?.publicId
+ if (update?.doc === docId && update.op && !isOwnSource) {
+ sendToRenderer('ot:remoteOp', {
+ docId: update.doc,
+ ops: update.op,
+ version: update.v
+ })
+ }
+ }
+ }
+ docEventHandlers.set(docId, handler)
+ overleafSock.on('serverEvent', handler)
+}
+
ipcMain.handle('ot:joinDoc', async (_e, docId: string) => {
if (!overleafSock) return { success: false, message: 'not_connected' }
@@ -1078,28 +1107,7 @@ ipcMain.handle('ot:joinDoc', async (_e, docId: string) => {
}
}
- // Notify bridge that editor is taking over this doc
- fileSyncBridge?.addEditorDoc(docId)
-
- // Remove existing handler if rejoining
- const existingHandler = docEventHandlers.get(docId)
- if (existingHandler) overleafSock.removeListener('serverEvent', existingHandler)
-
- // Set up relay for remote ops on this doc
- const handler = (name: string, args: unknown[]) => {
- if (name === 'otUpdateApplied') {
- const update = args[0] as { doc?: string; op?: unknown[]; v?: number } | undefined
- if (update?.doc === docId && update.op) {
- sendToRenderer('ot:remoteOp', {
- docId: update.doc,
- ops: update.op,
- version: update.v
- })
- }
- }
- }
- docEventHandlers.set(docId, handler)
- overleafSock.on('serverEvent', handler)
+ attachRendererDoc(docId)
return {
success: true,
@@ -1113,6 +1121,10 @@ ipcMain.handle('ot:joinDoc', async (_e, docId: string) => {
}
})
+ipcMain.handle('ot:attachDoc', async (_e, docId: string) => {
+ attachRendererDoc(docId)
+})
+
ipcMain.handle('ot:leaveDoc', async (_e, docId: string) => {
if (!overleafSock) return
try {
@@ -1438,18 +1450,14 @@ ipcMain.handle('overleaf:socketCompile', async (_e, mainTexRelPath: string) => {
return { success: false, log: 'No compilation manager or not connected', pdfPath: '' }
}
- const { docPathMap, fileRefs } = walkRootFolder(overleafSock.projectData.project.rootFolder)
-
// Bridge already keeps all docs synced to disk. Sync content to compilation manager.
if (fileSyncBridge) {
- for (const [docId, relPath] of Object.entries(docPathMap)) {
- const content = fileSyncBridge.getDocContent(relPath)
- if (content !== undefined) {
- compilationManager.setDocContent(relPath, content)
- }
+ for (const { path, content } of fileSyncBridge.getAllDocContents()) {
+ compilationManager.setDocContent(path, content)
}
} else {
// Fallback: fetch docs from socket if bridge isn't available
+ const { docPathMap } = walkRootFolder(overleafSock.projectData.project.rootFolder)
const allDocIds = Object.keys(docPathMap)
for (const docId of allDocIds) {
const relPath = docPathMap[docId]
@@ -1469,6 +1477,9 @@ ipcMain.handle('overleaf:socketCompile', async (_e, mainTexRelPath: string) => {
}
// Download all binary files (images, .bst, etc.)
+ const fileRefs = fileSyncBridge
+ ? fileSyncBridge.getFileRefs()
+ : walkRootFolder(overleafSock.projectData.project.rootFolder).fileRefs
await compilationManager.syncBinaries(fileRefs)
return compilationManager.compile(mainTexRelPath, (data) => {
diff --git a/src/main/otClient.ts b/src/main/otClient.ts
index 2917bcc..9d373bd 100644
--- a/src/main/otClient.ts
+++ b/src/main/otClient.ts
@@ -25,10 +25,16 @@ interface OtState {
version: number
}
+interface QueuedRemoteUpdate {
+ ops: OtOp[]
+ version: number
+}
+
export class OtClient {
private state: OtState
private sendFn: SendFn
private applyFn: ApplyFn
+ private queuedRemoteUpdates: QueuedRemoteUpdate[] = []
constructor(version: number, sendFn: SendFn, applyFn: ApplyFn) {
this.state = { name: 'synchronized', inflight: null, buffer: null, version }
@@ -115,6 +121,8 @@ export class OtClient {
// to synchronized, the second arrives when we're already there.
break
}
+
+ this.processQueuedRemoteUpdates()
}
/**
@@ -122,15 +130,27 @@ export class OtClient {
* Transform against inflight/buffered ops before applying.
*/
onRemoteOps(ops: OtOp[], newVersion: number) {
- // Stale message detection (matching Overleaf's ShareJS):
- // if the server version is behind our version, we already processed this.
+ // ShareJS update.v is the document version before the op is applied.
+ // Drop duplicates and queue out-of-order messages until their base version
+ // catches up, matching Overleaf's in-order processing.
if (newVersion < this.state.version) {
return
}
+ if (newVersion > this.state.version) {
+ this.queueRemoteUpdate(ops, newVersion)
+ return
+ }
+
+ this.applyRemoteOps(ops, newVersion)
+ this.processQueuedRemoteUpdates()
+ }
+
+ private applyRemoteOps(ops: OtOp[], newVersion: number) {
+ const nextVersion = newVersion + 1
switch (this.state.name) {
case 'synchronized':
- this.state = { ...this.state, version: newVersion }
+ this.state = { ...this.state, version: nextVersion }
this.applyFn(ops)
break
@@ -139,7 +159,7 @@ export class OtClient {
this.state = {
...this.state,
inflight: transformedInflight,
- version: newVersion
+ version: nextVersion
}
this.applyFn(transformedRemote)
break
@@ -152,7 +172,7 @@ export class OtClient {
...this.state,
inflight: inflightAfterRemote,
buffer: bufferAfterRemote,
- version: newVersion
+ version: nextVersion
}
this.applyFn(remoteAfterBuffer)
break
@@ -160,7 +180,23 @@ export class OtClient {
}
}
+ private queueRemoteUpdate(ops: OtOp[], version: number) {
+ if (this.queuedRemoteUpdates.some((update) => update.version === version)) return
+ this.queuedRemoteUpdates.push({ ops, version })
+ this.queuedRemoteUpdates.sort((a, b) => a.version - b.version)
+ }
+
+ private processQueuedRemoteUpdates() {
+ let nextIndex = this.queuedRemoteUpdates.findIndex((update) => update.version === this.state.version)
+ while (nextIndex !== -1) {
+ const [next] = this.queuedRemoteUpdates.splice(nextIndex, 1)
+ this.applyRemoteOps(next.ops, next.version)
+ nextIndex = this.queuedRemoteUpdates.findIndex((update) => update.version === this.state.version)
+ }
+ }
+
reset(version: number) {
this.state = { name: 'synchronized', inflight: null, buffer: null, version }
+ this.queuedRemoteUpdates = []
}
}
diff --git a/src/preload/index.ts b/src/preload/index.ts
index b1db391..05dc893 100644
--- a/src/preload/index.ts
+++ b/src/preload/index.ts
@@ -92,6 +92,7 @@ const api = {
message?: string
}>,
otLeaveDoc: (docId: string) => ipcRenderer.invoke('ot:leaveDoc', docId),
+ otAttachDoc: (docId: string) => ipcRenderer.invoke('ot:attachDoc', docId),
otSendOp: (docId: string, ops: unknown[], version: number, hash: string) =>
ipcRenderer.invoke('ot:sendOp', docId, ops, version, hash),
otFetchAllCommentContexts: () =>
@@ -184,6 +185,70 @@ const api = {
ipcRenderer.on('sync:newDoc', handler)
return () => ipcRenderer.removeListener('sync:newDoc', handler)
},
+ onSyncEntityCreated: (cb: (data: {
+ kind: 'doc' | 'file' | 'folder'
+ entityId: string
+ relPath: string
+ name: string
+ parentFolderId?: string
+ }) => void) => {
+ const handler = (_e: Electron.IpcRendererEvent, data: {
+ kind: 'doc' | 'file' | 'folder'
+ entityId: string
+ relPath: string
+ name: string
+ parentFolderId?: string
+ }) => cb(data)
+ ipcRenderer.on('sync:entityCreated', handler)
+ return () => ipcRenderer.removeListener('sync:entityCreated', handler)
+ },
+ onSyncEntityRemoved: (cb: (data: {
+ kind: 'doc' | 'file' | 'folder'
+ entityId: string
+ relPath: string
+ }) => void) => {
+ const handler = (_e: Electron.IpcRendererEvent, data: {
+ kind: 'doc' | 'file' | 'folder'
+ entityId: string
+ relPath: string
+ }) => cb(data)
+ ipcRenderer.on('sync:entityRemoved', handler)
+ return () => ipcRenderer.removeListener('sync:entityRemoved', handler)
+ },
+ onSyncEntityRenamed: (cb: (data: {
+ kind: 'doc' | 'file' | 'folder'
+ entityId: string
+ oldPath: string
+ newPath: string
+ newName: string
+ }) => void) => {
+ const handler = (_e: Electron.IpcRendererEvent, data: {
+ kind: 'doc' | 'file' | 'folder'
+ entityId: string
+ oldPath: string
+ newPath: string
+ newName: string
+ }) => cb(data)
+ ipcRenderer.on('sync:entityRenamed', handler)
+ return () => ipcRenderer.removeListener('sync:entityRenamed', handler)
+ },
+ onSyncEntityMoved: (cb: (data: {
+ kind: 'doc' | 'file' | 'folder'
+ entityId: string
+ oldPath: string
+ newPath: string
+ parentFolderId: string
+ }) => void) => {
+ const handler = (_e: Electron.IpcRendererEvent, data: {
+ kind: 'doc' | 'file' | 'folder'
+ entityId: string
+ oldPath: string
+ newPath: string
+ parentFolderId: string
+ }) => cb(data)
+ ipcRenderer.on('sync:entityMoved', handler)
+ return () => ipcRenderer.removeListener('sync:entityMoved', handler)
+ },
// Cursor tracking
cursorUpdate: (docId: string, row: number, column: number) =>
diff --git a/src/renderer/src/App.tsx b/src/renderer/src/App.tsx
index 1905e79..808176b 100644
--- a/src/renderer/src/App.tsx
+++ b/src/renderer/src/App.tsx
@@ -18,6 +18,12 @@ import SearchPanel from './components/SearchPanel'
import StatusBar from './components/StatusBar'
import type { OverleafDocSync } from './ot/overleafSync'
import { colorForUser, type RemoteCursor } from './extensions/remoteCursors'
+import {
+ applyEntityCreated,
+ applyEntityMoved,
+ applyEntityRemoved,
+ applyEntityRenamed,
+} from './utils/projectEntitySync'
export const activeDocSyncs = new Map<string, OverleafDocSync>()
@@ -106,12 +112,11 @@ export default function App() {
if (sync) sync.replaceContent(data.content, data.baseContent)
})
- // Listen for new docs created locally (e.g. by Claude Code)
- const unsubNewDoc = window.api.onSyncNewDoc((data) => {
- if (data.docId) {
- useAppStore.getState().addDocPath(data.docId, data.relPath)
- }
- })
+ // Keep the file tree in sync with Overleaf project-entity socket events.
+ const unsubEntityCreated = window.api.onSyncEntityCreated(applyEntityCreated)
+ const unsubEntityRemoved = window.api.onSyncEntityRemoved(applyEntityRemoved)
+ const unsubEntityRenamed = window.api.onSyncEntityRenamed(applyEntityRenamed)
+ const unsubEntityMoved = window.api.onSyncEntityMoved(applyEntityMoved)
// Listen for initial comment data (threads + contexts) from background fetch on connect
const unsubInitThreads = window.api.onCommentsInitThreads?.((data) => {
@@ -202,7 +207,10 @@ export default function App() {
unsubState()
unsubRejoined()
unsubExternalEdit()
- unsubNewDoc()
+ unsubEntityCreated()
+ unsubEntityRemoved()
+ unsubEntityRenamed()
+ unsubEntityMoved()
unsubInitThreads?.()
unsubInitContexts?.()
unsubCommentsEvent?.()
diff --git a/src/renderer/src/components/Editor.tsx b/src/renderer/src/components/Editor.tsx
index 97adf1b..b8075f7 100644
--- a/src/renderer/src/components/Editor.tsx
+++ b/src/renderer/src/components/Editor.tsx
@@ -218,6 +218,7 @@ export default function Editor() {
const docId = pathDocMap[activeTab]
const version = docId ? docVersions[docId] : undefined
if (docId && version !== undefined) {
+ window.api.otAttachDoc(docId)
const docSync = new OverleafDocSync(docId, version)
docSyncRef.current = docSync
activeDocSyncs.set(docId, docSync)
diff --git a/src/renderer/src/components/FileTree.tsx b/src/renderer/src/components/FileTree.tsx
index a95b0c3..1b9ce5e 100644
--- a/src/renderer/src/components/FileTree.tsx
+++ b/src/renderer/src/components/FileTree.tsx
@@ -176,8 +176,6 @@ export default function FileTree() {
const result = await window.api.overleafRenameEntity(projectId, entityType, entityId, newName.trim())
if (result.success) {
useAppStore.getState().setStatusMessage(`Renamed to ${newName.trim()}`)
- // Reconnect to refresh file tree
- await reconnectProject(projectId)
} else {
useAppStore.getState().setStatusMessage(`Rename failed: ${result.message}`)
}
@@ -210,7 +208,6 @@ export default function FileTree() {
const result = await window.api.overleafDeleteEntity(projectId, entityType, entityId)
if (result.success) {
useAppStore.getState().setStatusMessage(`Deleted ${node.name}`)
- await reconnectProject(projectId)
} else {
useAppStore.getState().setStatusMessage(`Delete failed: ${result.message}`)
}
@@ -233,7 +230,6 @@ export default function FileTree() {
const result = await window.api.overleafCreateDoc(projectId, parentId, name.trim())
if (result.success) {
useAppStore.getState().setStatusMessage(`Created ${name.trim()}`)
- await reconnectProject(projectId)
} else {
useAppStore.getState().setStatusMessage(`Create failed: ${result.message}`)
}
@@ -256,7 +252,6 @@ export default function FileTree() {
const result = await window.api.overleafCreateFolder(projectId, parentId, name.trim())
if (result.success) {
useAppStore.getState().setStatusMessage(`Created folder ${name.trim()}`)
- await reconnectProject(projectId)
} else {
useAppStore.getState().setStatusMessage(`Create failed: ${result.message}`)
}
@@ -304,8 +299,6 @@ export default function FileTree() {
}
}
- // Refresh file tree
- await reconnectProject(projectId)
}, [])
const handleOpenInOverleaf = () => {
@@ -376,16 +369,3 @@ export default function FileTree() {
</div>
)
}
-
-/** Reconnect to refresh the file tree after a file operation */
-async function reconnectProject(projectId: string) {
- const result = await window.api.otConnect(projectId)
- if (result.success) {
- const store = useAppStore.getState()
- if (result.files) store.setFiles(result.files as any)
- if (result.project) store.setOverleafProject(result.project)
- if (result.docPathMap && result.pathDocMap) store.setDocMaps(result.docPathMap, result.pathDocMap)
- if (result.fileRefs) store.setFileRefs(result.fileRefs)
- if (result.rootFolderId) store.setRootFolderId(result.rootFolderId)
- }
-}
diff --git a/src/renderer/src/ot/cmAdapter.ts b/src/renderer/src/ot/cmAdapter.ts
index 87c23bf..640217e 100644
--- a/src/renderer/src/ot/cmAdapter.ts
+++ b/src/renderer/src/ot/cmAdapter.ts
@@ -47,25 +47,14 @@ export function changeSetToOtOps(changes: ChangeSet, oldDoc: Text): OtOp[] {
*/
export function otOpsToChangeSpec(ops: OtOp[]): ChangeSpec[] {
const specs: ChangeSpec[] = []
- // Sort ops by position (process in order). Inserts before deletes at same position.
- const sorted = [...ops].filter(op => isInsert(op) || isDelete(op)).sort((a, b) => {
- if (a.p !== b.p) return a.p - b.p
- // Inserts before deletes at same position
- if (isInsert(a) && isDelete(b)) return -1
- if (isDelete(a) && isInsert(b)) return 1
- return 0
- })
-
- // We need to adjust positions as we apply ops sequentially
- let posShift = 0
- for (const op of sorted) {
+ // Overleaf/ShareJS text ops are sequential: every component position is
+ // relative to the document after previous components in the same op.
+ for (const op of ops) {
if (isInsert(op)) {
- specs.push({ from: op.p + posShift, insert: op.i })
- posShift += op.i.length
+ specs.push({ from: op.p, insert: op.i })
} else if (isDelete(op)) {
- specs.push({ from: op.p + posShift, to: op.p + posShift + op.d.length })
- posShift -= op.d.length
+ specs.push({ from: op.p, to: op.p + op.d.length })
}
}
diff --git a/src/renderer/src/ot/otClient.ts b/src/renderer/src/ot/otClient.ts
index 4a0a873..0395c70 100644
--- a/src/renderer/src/ot/otClient.ts
+++ b/src/renderer/src/ot/otClient.ts
@@ -8,10 +8,16 @@ import { transformOps } from './transform'
export type SendFn = (ops: OtOp[], version: number) => void
export type ApplyFn = (ops: OtOp[]) => void
+/** A remote ShareJS update held back until our local doc version catches up to its base version. */
+interface QueuedRemoteUpdate {
+  // Op components to apply, in order, once the version matches.
+  ops: OtOp[]
+  // ShareJS update.v: the document version the ops are based on (version BEFORE apply).
+  version: number
+}
+
export class OtClient {
private state: OtState
private sendFn: SendFn
private applyFn: ApplyFn
+ private queuedRemoteUpdates: QueuedRemoteUpdate[] = []
constructor(version: number, sendFn: SendFn, applyFn: ApplyFn) {
this.state = { name: 'synchronized', inflight: null, buffer: null, version }
@@ -88,18 +94,38 @@ export class OtClient {
break
case 'synchronized':
- // Unexpected ack in synchronized state, ignore
- console.warn('[OtClient] unexpected ack in synchronized state')
+ // Duplicate ack. The server can send both own-source echoes and
+ // explicit no-op acks depending on deployment/version.
break
}
+
+ this.processQueuedRemoteUpdates()
}
/** Called when server sends a remote operation */
onRemoteOps(ops: OtOp[], newVersion: number) {
+ // ShareJS update.v is the document version before the op is applied.
+ // Drop duplicates and queue out-of-order messages until their base version
+ // catches up, matching Overleaf's in-order processing.
+ if (newVersion < this.state.version) {
+ return
+ }
+ if (newVersion > this.state.version) {
+ this.queueRemoteUpdate(ops, newVersion)
+ return
+ }
+
+ this.applyRemoteOps(ops, newVersion)
+ this.processQueuedRemoteUpdates()
+ }
+
+ private applyRemoteOps(ops: OtOp[], newVersion: number) {
+ const nextVersion = newVersion + 1
+
switch (this.state.name) {
case 'synchronized':
// Apply directly
- this.state = { ...this.state, version: newVersion }
+ this.state = { ...this.state, version: nextVersion }
this.applyFn(ops)
break
@@ -109,7 +135,7 @@ export class OtClient {
this.state = {
...this.state,
inflight: transformedInflight,
- version: newVersion
+ version: nextVersion
}
this.applyFn(transformedRemote)
break
@@ -123,7 +149,7 @@ export class OtClient {
...this.state,
inflight: inflightAfterRemote,
buffer: bufferAfterRemote,
- version: newVersion
+ version: nextVersion
}
this.applyFn(remoteAfterBuffer)
break
@@ -131,8 +157,24 @@ export class OtClient {
}
}
+  /** Buffer an out-of-order remote update: dedupe by base version, keep the queue sorted ascending. */
+  private queueRemoteUpdate(ops: OtOp[], version: number) {
+    // A duplicate delivery of the same base version is dropped (first one wins).
+    if (this.queuedRemoteUpdates.some((update) => update.version === version)) return
+    this.queuedRemoteUpdates.push({ ops, version })
+    this.queuedRemoteUpdates.sort((a, b) => a.version - b.version)
+  }
+
+  /** Drain queued updates whose base version now equals our version.
+   *  Each applyRemoteOps bumps state.version, so the queue is re-scanned after every apply
+   *  until no contiguous update remains. */
+  private processQueuedRemoteUpdates() {
+    let nextIndex = this.queuedRemoteUpdates.findIndex((update) => update.version === this.state.version)
+    while (nextIndex !== -1) {
+      const [next] = this.queuedRemoteUpdates.splice(nextIndex, 1)
+      this.applyRemoteOps(next.ops, next.version)
+      nextIndex = this.queuedRemoteUpdates.findIndex((update) => update.version === this.state.version)
+    }
+  }
+
/** Reset to a known version (e.g. after reconnect) */
reset(version: number) {
this.state = { name: 'synchronized', inflight: null, buffer: null, version }
+ this.queuedRemoteUpdates = []
}
}
diff --git a/src/renderer/src/ot/overleafSync.ts b/src/renderer/src/ot/overleafSync.ts
index 4a6deda..3f4b194 100644
--- a/src/renderer/src/ot/overleafSync.ts
+++ b/src/renderer/src/ot/overleafSync.ts
@@ -85,13 +85,15 @@ export class OverleafDocSync {
const specs = otOpsToChangeSpec(ops)
if (specs.length === 0) return
- this.view.dispatch({
- changes: specs,
- annotations: [
- remoteUpdateAnnotation.of(true),
- Transaction.addToHistory.of(false)
- ]
- })
+ for (const changes of specs) {
+ this.view.dispatch({
+ changes,
+ annotations: [
+ remoteUpdateAnnotation.of(true),
+ Transaction.addToHistory.of(false)
+ ]
+ })
+ }
}
/** Called when server acknowledges our ops */
@@ -126,32 +128,21 @@ export class OverleafDocSync {
}
/** Replace entire editor content with new content (external edit from disk).
- * If baseContent is provided, does a three-way merge to preserve concurrent
- * remote changes that arrived while the disk edit was being debounced. */
- replaceContent(newContent: string, baseContent?: string) {
+ * Computes a minimal diff from the current editor state to the new content
+ * and dispatches it as a local transaction (which the OT extension picks up). */
+ replaceContent(newContent: string, _baseContent?: string) {
if (!this.view) return
const currentContent = this.view.state.doc.toString()
if (currentContent === newContent) return
+ // Direct two-way diff: always diff current editor state → new disk content.
+ // We intentionally do NOT three-way merge with baseContent because the bridge's
+ // lastKnownContent (used as baseContent) races with onEditorContentChanged and
+ // frequently doesn't match the editor's actual state, causing patch_apply to
+ // produce garbled text when it "succeeds" via fuzzy matching.
const dmp = new diff_match_patch()
- let targetContent = newContent
-
- // Three-way merge: if editor has diverged from the base (due to remote edits),
- // apply only the disk changes (base→new) as patches on top of current editor state
- if (baseContent !== undefined && currentContent !== baseContent) {
- const patches = dmp.patch_make(baseContent, newContent)
- const [merged, results] = dmp.patch_apply(patches, currentContent)
- if (results.length > 0 && results.every(r => r)) {
- targetContent = merged
- }
- // If patch failed, fall through to two-way diff (full replacement)
- }
-
- if (currentContent === targetContent) return
-
- // Use diff to compute minimal changes so comment range positions remap correctly
- const diffs = dmp.diff_main(currentContent, targetContent)
+ const diffs = dmp.diff_main(currentContent, newContent)
dmp.diff_cleanupEfficiency(diffs)
const changes: ChangeSpec[] = []
diff --git a/src/renderer/src/utils/projectEntitySync.ts b/src/renderer/src/utils/projectEntitySync.ts
new file mode 100644
index 0000000..e328438
--- /dev/null
+++ b/src/renderer/src/utils/projectEntitySync.ts
@@ -0,0 +1,423 @@
+// Copyright (c) 2026 Yuren Hao
+// Licensed under AGPL-3.0 - see LICENSE file
+
+import { useAppStore, type FileNode } from '../stores/appStore'
+
+/** Kind of Overleaf project entity: editable doc, binary file ref, or folder. */
+export type SyncEntityKind = 'doc' | 'file' | 'folder'
+
+/** Payload for an entity-created socket event (new doc/file/folder in the project tree). */
+export interface SyncEntityCreated {
+  kind: SyncEntityKind
+  entityId: string
+  // Project-relative path of the new entity.
+  relPath: string
+  name: string
+  // Folder the entity was created in, when known.
+  parentFolderId?: string
+}
+
+/** Payload for an entity-removed socket event. */
+export interface SyncEntityRemoved {
+  kind: SyncEntityKind
+  entityId: string
+  relPath: string
+}
+
+/** Payload for an in-place rename (same parent folder, new name). */
+export interface SyncEntityRenamed {
+  kind: SyncEntityKind
+  entityId: string
+  oldPath: string
+  newPath: string
+  newName: string
+}
+
+/** Payload for a move to a different parent folder. */
+export interface SyncEntityMoved {
+  kind: SyncEntityKind
+  entityId: string
+  oldPath: string
+  newPath: string
+  parentFolderId: string
+}
+
+// Strips BOTH leading and trailing slashes (the name undersells it).
+function stripTrailingSlash(path: string): string {
+  return path.replace(/^\/+/, '').replace(/\/+$/, '')
+}
+
+// Canonical path form used throughout this module: docs/files have no leading or
+// trailing slash; non-empty folder paths end with exactly one '/'; the root folder is ''.
+function normalizePath(path: string, kind: SyncEntityKind): string {
+  const stripped = stripTrailingSlash(path)
+  if (kind === 'folder') return stripped ? `${stripped}/` : ''
+  return stripped
+}
+
+// Splits a normalized path into its non-empty segments (e.g. 'a/b/c' → ['a','b','c']).
+function pathParts(path: string, kind: SyncEntityKind): string[] {
+  const normalized = normalizePath(path, kind)
+  return stripTrailingSlash(normalized).split('/').filter(Boolean)
+}
+
+// Path equality after normalizing both sides for the entity kind.
+function pathMatchesKind(path: string, kind: SyncEntityKind, targetPath: string): boolean {
+  return normalizePath(path, kind) === normalizePath(targetPath, kind)
+}
+
+// Matches a tree node by its kind-specific entity id first, falling back to a
+// path comparison (guarded by isDir to keep docs/files and folders apart) when
+// the node lacks an id or the id didn't match.
+function isNodeMatch(node: FileNode, kind: SyncEntityKind, entityId: string, relPath?: string): boolean {
+  if (kind === 'doc') {
+    return node.docId === entityId || (!!relPath && !node.isDir && pathMatchesKind(node.path, kind, relPath))
+  }
+  if (kind === 'file') {
+    return node.fileRefId === entityId || (!!relPath && !node.isDir && pathMatchesKind(node.path, kind, relPath))
+  }
+  return node.folderId === entityId || (!!relPath && node.isDir && pathMatchesKind(node.path, kind, relPath))
+}
+
+// Immutable replace-at-index: appends when index === -1, otherwise returns a
+// copy of the array with only that slot swapped.
+function withNode(nodes: FileNode[], index: number, node: FileNode): FileNode[] {
+  if (index === -1) return [...nodes, node]
+  return nodes.map((existing, i) => (i === index ? node : existing))
+}
+
+// Builds the FileNode for a created entity. When re-creating a folder that
+// already exists in the tree, its children are preserved from the existing node.
+function createLeafNode(entity: SyncEntityCreated, existing?: FileNode): FileNode {
+  const path = normalizePath(entity.relPath, entity.kind)
+  const base: FileNode = {
+    name: entity.name,
+    path,
+    isDir: entity.kind === 'folder'
+  }
+
+  if (entity.kind === 'doc') {
+    base.docId = entity.entityId
+  } else if (entity.kind === 'file') {
+    base.fileRefId = entity.entityId
+  } else {
+    base.folderId = entity.entityId
+    base.children = existing?.children ?? []
+  }
+
+  return base
+}
+
+// Inserts (or replaces) the created entity in the immutable file tree,
+// materializing any missing intermediate folders along its path.
+function upsertFileTreeNode(files: FileNode[], entity: SyncEntityCreated): FileNode[] {
+  const parts = pathParts(entity.relPath, entity.kind)
+  if (parts.length === 0) return files
+  // Path (with trailing '/') of the direct parent folder, used below to stamp
+  // entity.parentFolderId onto that intermediate node.
+  // NOTE(review): for kind === 'folder' this is forced to '', so intermediate
+  // folders created by a folder event never receive parentFolderId — confirm intended.
+  const parentFolderPath = entity.kind === 'folder' || parts.length <= 1
+    ? ''
+    : `${parts.slice(0, -1).join('/')}/`
+
+  const upsert = (nodes: FileNode[], depth: number, prefix: string): FileNode[] => {
+    const name = parts[depth]
+    const isLeaf = depth === parts.length - 1
+    const currentPath = isLeaf
+      ? normalizePath(entity.relPath, entity.kind)
+      : `${prefix}${name}/`
+
+    // Match by path; at the leaf also match by entity id so a re-created
+    // entity replaces its old node rather than duplicating it.
+    const index = nodes.findIndex((node) => (
+      node.path === currentPath || (isLeaf && isNodeMatch(node, entity.kind, entity.entityId, entity.relPath))
+    ))
+    const existing = index >= 0 ? nodes[index] : undefined
+
+    if (isLeaf) {
+      return withNode(nodes, index, createLeafNode(entity, existing))
+    }
+
+    const folderNode: FileNode = {
+      name,
+      path: currentPath,
+      isDir: true,
+      folderId: currentPath === parentFolderPath ? entity.parentFolderId : existing?.folderId,
+      children: upsert(existing?.children ?? [], depth + 1, currentPath)
+    }
+
+    return withNode(nodes, index, folderNode)
+  }
+
+  return upsert(files, 0, '')
+}
+
+// Removes the matching node (and, implicitly, its whole subtree) from the
+// immutable file tree; non-matching branches are copied with filtered children.
+function removeFileTreeNode(files: FileNode[], entity: SyncEntityRemoved): FileNode[] {
+  return files.flatMap((node) => {
+    if (isNodeMatch(node, entity.kind, entity.entityId, entity.relPath)) return []
+    if (node.children) {
+      return [{ ...node, children: removeFileTreeNode(node.children, entity) }]
+    }
+    return [node]
+  })
+}
+
+// Recursively rewrites node.path (and all descendants) by swapping the folder
+// prefix oldPath → newPath. Paths outside the old prefix are left untouched.
+function rewriteNodePath(node: FileNode, oldPath: string, newPath: string): FileNode {
+  const oldPrefix = normalizePath(oldPath, 'folder')
+  const newPrefix = normalizePath(newPath, 'folder')
+  const rewrittenPath = node.path.startsWith(oldPrefix)
+    ? newPrefix + node.path.slice(oldPrefix.length)
+    : node.path
+
+  return {
+    ...node,
+    path: rewrittenPath,
+    children: node.children?.map((child) => rewriteNodePath(child, oldPath, newPath))
+  }
+}
+
+// Applies an in-place rename: updates the matching node's name/path and, for a
+// folder, prefix-rewrites every descendant path. Other branches recurse unchanged.
+function renameFileTreeNode(files: FileNode[], entity: SyncEntityRenamed): FileNode[] {
+  return files.map((node) => {
+    if (isNodeMatch(node, entity.kind, entity.entityId, entity.oldPath)) {
+      const nextPath = normalizePath(entity.newPath, entity.kind)
+      return {
+        ...node,
+        name: entity.newName,
+        path: nextPath,
+        children: node.isDir
+          ? node.children?.map((child) => rewriteNodePath(child, entity.oldPath, entity.newPath))
+          : node.children
+      }
+    }
+    if (node.children) {
+      return { ...node, children: renameFileTreeNode(node.children, entity) }
+    }
+    return node
+  })
+}
+
+// Detaches the matching node from the tree, returning both the tree without it
+// and the removed node (still carrying its old path/children), or node: null
+// when nothing matched. Ancestor nodes along every branch are shallow-copied.
+function extractFileTreeNode(
+  files: FileNode[],
+  kind: SyncEntityKind,
+  entityId: string,
+  oldPath: string
+): { files: FileNode[]; node: FileNode | null } {
+  let found: FileNode | null = null
+  const nextFiles = files.flatMap((node) => {
+    if (isNodeMatch(node, kind, entityId, oldPath)) {
+      found = node
+      return []
+    }
+    if (node.children) {
+      const result = extractFileTreeNode(node.children, kind, entityId, oldPath)
+      // Propagate a match found deeper in this branch.
+      if (result.node) found = result.node
+      return [{ ...node, children: result.files }]
+    }
+    return [node]
+  })
+
+  return { files: nextFiles, node: found }
+}
+
+// Re-inserts an already-built node at the location implied by node.path,
+// creating missing intermediate folders on the way down.
+// NOTE(review): intermediate folders created here get folderId from the existing
+// node only (undefined when newly materialized) — confirm that is acceptable.
+function insertExistingNode(files: FileNode[], node: FileNode): FileNode[] {
+  const parts = stripTrailingSlash(node.path).split('/').filter(Boolean)
+  if (parts.length === 0) return files
+
+  const insert = (nodes: FileNode[], depth: number, prefix: string): FileNode[] => {
+    const name = parts[depth]
+    const isLeaf = depth === parts.length - 1
+    const currentPath = isLeaf ? node.path : `${prefix}${name}/`
+    const index = nodes.findIndex((candidate) => candidate.path === currentPath)
+
+    if (isLeaf) {
+      return withNode(nodes, index, node)
+    }
+
+    const existing = index >= 0 ? nodes[index] : undefined
+    const folderNode: FileNode = {
+      name,
+      path: currentPath,
+      isDir: true,
+      folderId: existing?.folderId,
+      children: insert(existing?.children ?? [], depth + 1, currentPath)
+    }
+
+    return withNode(nodes, index, folderNode)
+  }
+
+  return insert(files, 0, '')
+}
+
+// Move = extract at the old location, rewrite the node's (and descendants')
+// paths to the new location, then re-insert. Returns the tree unchanged when
+// the moved entity cannot be found.
+function moveFileTreeNode(files: FileNode[], entity: SyncEntityMoved): FileNode[] {
+  const { files: withoutNode, node } = extractFileTreeNode(files, entity.kind, entity.entityId, entity.oldPath)
+  if (!node) return files
+
+  const newPath = normalizePath(entity.newPath, entity.kind)
+  const movedNode: FileNode = {
+    ...node,
+    path: newPath,
+    children: node.isDir
+      ? node.children?.map((child) => rewriteNodePath(child, entity.oldPath, entity.newPath))
+      : node.children
+  }
+
+  return insertExistingNode(withoutNode, movedNode)
+}
+
+// For docs/files: exact normalized-path equality. For folders: prefix match, so
+// everything under the folder counts as affected (prefix ends in '/', which
+// keeps 'foo/' from matching a sibling 'foobar/...').
+function isAffectedPath(path: string, kind: SyncEntityKind, relPath: string): boolean {
+  if (kind !== 'folder') return path === normalizePath(relPath, kind)
+  const prefix = normalizePath(relPath, 'folder')
+  return path.startsWith(prefix)
+}
+
+// Rewrites a single path for a rename/move: exact swap for docs/files, prefix
+// swap for folders; unrelated paths pass through unchanged.
+function rewritePath(path: string, kind: SyncEntityKind, oldPath: string, newPath: string): string {
+  if (kind !== 'folder') {
+    return path === normalizePath(oldPath, kind) ? normalizePath(newPath, kind) : path
+  }
+
+  const oldPrefix = normalizePath(oldPath, 'folder')
+  const newPrefix = normalizePath(newPath, 'folder')
+  return path.startsWith(oldPrefix) ? newPrefix + path.slice(oldPrefix.length) : path
+}
+
+// Rebuilds both directions of the docId↔path mapping after a rename/move.
+// A renamed doc gets its new path directly (by id); any other doc is rewritten
+// through rewritePath, which handles folder-prefix moves.
+// NOTE(review): if two docs ever rewrote to the same path, pathDocMap keeps the
+// last one seen — assumed impossible in a valid project tree.
+function rewriteDocMaps(
+  docPathMap: Record<string, string>,
+  kind: SyncEntityKind,
+  entityId: string,
+  oldPath: string,
+  newPath: string
+) {
+  const nextDocPathMap: Record<string, string> = {}
+  const nextPathDocMap: Record<string, string> = {}
+
+  for (const [docId, path] of Object.entries(docPathMap)) {
+    const rewritten = kind === 'doc' && docId === entityId
+      ? normalizePath(newPath, 'doc')
+      : rewritePath(path, kind, oldPath, newPath)
+    nextDocPathMap[docId] = rewritten
+    nextPathDocMap[rewritten] = docId
+  }
+
+  return { docPathMap: nextDocPathMap, pathDocMap: nextPathDocMap }
+}
+
+// Rewrites binary file-ref paths after a rename/move: the renamed file ref gets
+// its new path by id; other refs go through the generic path rewrite.
+function rewriteFileRefs(
+  fileRefs: Array<{ id: string; path: string }>,
+  kind: SyncEntityKind,
+  entityId: string,
+  oldPath: string,
+  newPath: string
+) {
+  return fileRefs.map((ref) => ({
+    ...ref,
+    path: kind === 'file' && ref.id === entityId
+      ? normalizePath(newPath, 'file')
+      : rewritePath(ref.path, kind, oldPath, newPath)
+  }))
+}
+
+// Rewrites editor UI state (open tabs, active tab, cached file contents) so
+// tab paths and content keys track a rename/move. Tab display names are
+// re-derived from the rewritten path's last segment.
+function rewriteOpenState(
+  openTabs: Array<{ path: string; name: string; modified: boolean }>,
+  activeTab: string | null,
+  fileContents: Record<string, string>,
+  kind: SyncEntityKind,
+  oldPath: string,
+  newPath: string
+) {
+  const nextOpenTabs = openTabs.map((tab) => {
+    const path = rewritePath(tab.path, kind, oldPath, newPath)
+    return {
+      ...tab,
+      path,
+      name: path.split('/').pop() || tab.name
+    }
+  })
+  const nextActiveTab = activeTab ? rewritePath(activeTab, kind, oldPath, newPath) : activeTab
+  const nextFileContents: Record<string, string> = {}
+  for (const [path, content] of Object.entries(fileContents)) {
+    nextFileContents[rewritePath(path, kind, oldPath, newPath)] = content
+  }
+
+  return { openTabs: nextOpenTabs, activeTab: nextActiveTab, fileContents: nextFileContents }
+}
+
+// Drops tabs/contents affected by a removal (exact path for docs/files, whole
+// subtree for folders). If the active tab was removed, activation falls back to
+// the last remaining tab, or null when none are left.
+function removeOpenState(
+  openTabs: Array<{ path: string; name: string; modified: boolean }>,
+  activeTab: string | null,
+  fileContents: Record<string, string>,
+  kind: SyncEntityKind,
+  relPath: string
+) {
+  const nextOpenTabs = openTabs.filter((tab) => !isAffectedPath(tab.path, kind, relPath))
+  const nextActiveTab = activeTab && isAffectedPath(activeTab, kind, relPath)
+    ? (nextOpenTabs[nextOpenTabs.length - 1]?.path ?? null)
+    : activeTab
+  const nextFileContents: Record<string, string> = {}
+  for (const [path, content] of Object.entries(fileContents)) {
+    if (!isAffectedPath(path, kind, relPath)) nextFileContents[path] = content
+  }
+
+  return { openTabs: nextOpenTabs, activeTab: nextActiveTab, fileContents: nextFileContents }
+}
+
+/** Store reducer for an entity-created event: upserts the tree node and, for a
+ *  doc, registers both directions of the docId↔path map; for a binary file,
+ *  replaces any stale ref with the same id or path. Folders only touch the tree. */
+export function applyEntityCreated(entity: SyncEntityCreated): void {
+  useAppStore.setState((state) => {
+    const files = upsertFileTreeNode(state.files, entity)
+    // Copy before mutating — state maps must stay untouched for React equality.
+    const docPathMap = { ...state.docPathMap }
+    const pathDocMap = { ...state.pathDocMap }
+    let fileRefs = state.fileRefs
+
+    if (entity.kind === 'doc') {
+      const relPath = normalizePath(entity.relPath, entity.kind)
+      docPathMap[entity.entityId] = relPath
+      pathDocMap[relPath] = entity.entityId
+    } else if (entity.kind === 'file') {
+      const relPath = normalizePath(entity.relPath, entity.kind)
+      fileRefs = [
+        ...state.fileRefs.filter((ref) => ref.id !== entity.entityId && ref.path !== relPath),
+        { id: entity.entityId, path: relPath }
+      ]
+    }
+
+    return { files, docPathMap, pathDocMap, fileRefs }
+  })
+}
+
+/** Store reducer for an entity-removed event: drops the tree node, purges doc
+ *  maps and file refs (by id for the removed entity itself, by folder prefix
+ *  for everything under a removed folder), closes affected tabs, and clears
+ *  mainDocument when the removed doc was the main one. */
+export function applyEntityRemoved(entity: SyncEntityRemoved): void {
+  useAppStore.setState((state) => {
+    const relPath = normalizePath(entity.relPath, entity.kind)
+    const files = removeFileTreeNode(state.files, entity)
+    const docPathMap: Record<string, string> = {}
+    const pathDocMap: Record<string, string> = {}
+
+    for (const [docId, path] of Object.entries(state.docPathMap)) {
+      // Removed doc matched by id; docs under a removed folder matched by path prefix.
+      if (entity.kind === 'doc' ? docId === entity.entityId : isAffectedPath(path, entity.kind, relPath)) {
+        continue
+      }
+      docPathMap[docId] = path
+      pathDocMap[path] = docId
+    }
+
+    const fileRefs = state.fileRefs.filter((ref) => (
+      entity.kind === 'file'
+        ? ref.id !== entity.entityId
+        : !isAffectedPath(ref.path, entity.kind, relPath)
+    ))
+    const openState = removeOpenState(state.openTabs, state.activeTab, state.fileContents, entity.kind, relPath)
+    const mainDocument = entity.kind === 'doc' && state.mainDocument === entity.entityId
+      ? null
+      : state.mainDocument
+
+    return { files, docPathMap, pathDocMap, fileRefs, mainDocument, ...openState }
+  })
+}
+
+/** Store reducer for an entity-renamed event: renames the tree node and
+ *  rewrites every path-keyed piece of state (doc maps, file refs, open tabs,
+ *  active tab, cached contents) from oldPath to newPath. */
+export function applyEntityRenamed(entity: SyncEntityRenamed): void {
+  useAppStore.setState((state) => {
+    const files = renameFileTreeNode(state.files, entity)
+    const maps = rewriteDocMaps(state.docPathMap, entity.kind, entity.entityId, entity.oldPath, entity.newPath)
+    const fileRefs = rewriteFileRefs(state.fileRefs, entity.kind, entity.entityId, entity.oldPath, entity.newPath)
+    const openState = rewriteOpenState(
+      state.openTabs,
+      state.activeTab,
+      state.fileContents,
+      entity.kind,
+      entity.oldPath,
+      entity.newPath
+    )
+
+    return { files, ...maps, fileRefs, ...openState }
+  })
+}
+
+/** Store reducer for an entity-moved event. Identical path rewrites to a
+ *  rename, but the tree node is extracted and re-inserted under its new parent
+ *  instead of renamed in place. */
+export function applyEntityMoved(entity: SyncEntityMoved): void {
+  useAppStore.setState((state) => {
+    const files = moveFileTreeNode(state.files, entity)
+    const maps = rewriteDocMaps(state.docPathMap, entity.kind, entity.entityId, entity.oldPath, entity.newPath)
+    const fileRefs = rewriteFileRefs(state.fileRefs, entity.kind, entity.entityId, entity.oldPath, entity.newPath)
+    const openState = rewriteOpenState(
+      state.openTabs,
+      state.activeTab,
+      state.fileContents,
+      entity.kind,
+      entity.oldPath,
+      entity.newPath
+    )
+
+    return { files, ...maps, fileRefs, ...openState }
+  })
+}