summaryrefslogtreecommitdiff
path: root/src/main
diff options
context:
space:
mode:
Diffstat (limited to 'src/main')
-rw-r--r--src/main/compilationManager.ts162
-rw-r--r--src/main/fileSyncBridge.ts364
-rw-r--r--src/main/index.ts1219
-rw-r--r--src/main/otClient.ts131
-rw-r--r--src/main/otTransform.ts117
-rw-r--r--src/main/otTypes.ts31
-rw-r--r--src/main/overleafProtocol.ts95
-rw-r--r--src/main/overleafSocket.ts401
8 files changed, 2118 insertions, 402 deletions
diff --git a/src/main/compilationManager.ts b/src/main/compilationManager.ts
new file mode 100644
index 0000000..3529345
--- /dev/null
+++ b/src/main/compilationManager.ts
@@ -0,0 +1,162 @@
+// Manages temp directory for Overleaf socket-mode compilation
+import { join, basename } from 'path'
+import { writeFile, mkdir, rm } from 'fs/promises'
+import { existsSync } from 'fs'
+import { spawn } from 'child_process'
+import { net } from 'electron'
+
+export class CompilationManager {
+ private tmpDir: string
+ private projectId: string
+ private cookie: string
+ private docContents = new Map<string, string>() // docPath → content
+ private fileRefCache = new Map<string, boolean>() // fileRefPath → downloaded
+
+ constructor(projectId: string, cookie: string) {
+ this.projectId = projectId
+ this.cookie = cookie
+ this.tmpDir = join(require('os').tmpdir(), `claudetex-${projectId}`)
+ }
+
+ get dir(): string {
+ return this.tmpDir
+ }
+
+ /** Check if a doc is already stored */
+ hasDoc(relativePath: string): boolean {
+ return this.docContents.has(relativePath)
+ }
+
+ /** Store doc content (called when docs are joined/updated) */
+ setDocContent(relativePath: string, content: string) {
+ // Strip C1 control characters (U+0080-U+009F) — Overleaf embeds these as
+ // range markers for tracked changes / comments. They break pdflatex.
+ this.docContents.set(relativePath, content.replace(/[\u0080-\u009F]/g, ''))
+ }
+
+ /** Write all doc contents to disk */
+ async syncDocs(): Promise<void> {
+ await mkdir(this.tmpDir, { recursive: true })
+ for (const [relPath, content] of this.docContents) {
+ const fullPath = join(this.tmpDir, relPath)
+ const dir = fullPath.substring(0, fullPath.lastIndexOf('/'))
+ await mkdir(dir, { recursive: true })
+ await writeFile(fullPath, content, 'utf-8')
+ }
+ }
+
+ /** Download a binary file (image, .bst, etc.) from Overleaf */
+ async downloadFile(fileRefId: string, relativePath: string): Promise<void> {
+ if (this.fileRefCache.has(relativePath)) return
+
+ const fullPath = join(this.tmpDir, relativePath)
+ const dir = fullPath.substring(0, fullPath.lastIndexOf('/'))
+ await mkdir(dir, { recursive: true })
+
+ return new Promise((resolve, reject) => {
+ const url = `https://www.overleaf.com/project/${this.projectId}/file/${fileRefId}`
+ const req = net.request(url)
+ req.setHeader('Cookie', this.cookie)
+ req.setHeader('User-Agent', 'Mozilla/5.0')
+
+ const chunks: Buffer[] = []
+ req.on('response', (res) => {
+ res.on('data', (chunk) => chunks.push(chunk as Buffer))
+ res.on('end', async () => {
+ try {
+ const { writeFile: wf } = await import('fs/promises')
+ await wf(fullPath, Buffer.concat(chunks))
+ this.fileRefCache.set(relativePath, true)
+ resolve()
+ } catch (e) {
+ reject(e)
+ }
+ })
+ })
+ req.on('error', reject)
+ req.end()
+ })
+ }
+
+ /** Download all binary files in the project */
+ async syncBinaries(fileRefs: Array<{ id: string; path: string }>): Promise<void> {
+ for (const ref of fileRefs) {
+ try {
+ await this.downloadFile(ref.id, ref.path)
+ } catch (e) {
+ console.log(`[CompilationManager] failed to download ${ref.path}:`, e)
+ }
+ }
+ }
+
+ /** Run latexmk compilation */
+ async compile(
+ mainTexRelPath: string,
+ onLog: (data: string) => void
+ ): Promise<{ success: boolean; log: string; pdfPath: string }> {
+ await this.syncDocs()
+
+ const texPaths = [
+ '/Library/TeX/texbin',
+ '/usr/local/texlive/2024/bin/universal-darwin',
+ '/usr/texbin',
+ '/opt/homebrew/bin'
+ ]
+ const envPath = texPaths.join(':') + ':' + (process.env.PATH || '')
+
+ // Use // suffix for recursive search of ALL subdirectories in the project tree.
+ // This ensures .sty, .bst, .cls, images, etc. are always found regardless of nesting.
+ const texInputs = `${this.tmpDir}//:`
+ const texBase = basename(mainTexRelPath, '.tex')
+ const pdfPath = join(this.tmpDir, texBase + '.pdf')
+
+ const args = [
+ '-pdf', '-f', '-g', '-bibtex', '-synctex=1',
+ '-interaction=nonstopmode', '-file-line-error',
+ '-outdir=' + this.tmpDir,
+ mainTexRelPath
+ ]
+ console.log('[compile] cwd:', this.tmpDir)
+ console.log('[compile] args:', args.join(' '))
+ console.log('[compile] TEXINPUTS:', texInputs)
+ console.log('[compile] pdfPath:', pdfPath)
+ console.log('[compile] docs synced:', this.docContents.size, 'files:', [...this.docContents.keys()].slice(0, 5))
+
+ return new Promise((resolve) => {
+ let log = ''
+ const proc = spawn('latexmk', args, {
+ cwd: this.tmpDir,
+ env: { ...process.env, PATH: envPath, TEXINPUTS: texInputs, BIBINPUTS: texInputs, BSTINPUTS: texInputs }
+ })
+
+ proc.stdout.on('data', (data) => {
+ const s = data.toString()
+ log += s
+ onLog(s)
+ })
+
+ proc.stderr.on('data', (data) => {
+ const s = data.toString()
+ log += s
+ onLog(s)
+ })
+
+ proc.on('close', (code) => {
+ resolve({ success: code === 0, log, pdfPath })
+ })
+
+ proc.on('error', (err) => {
+ resolve({ success: false, log: log + '\n' + err.message, pdfPath })
+ })
+ })
+ }
+
+ /** Clean up temp directory */
+ async cleanup(): Promise<void> {
+ try {
+ if (existsSync(this.tmpDir)) {
+ await rm(this.tmpDir, { recursive: true })
+ }
+ } catch { /* ignore */ }
+ }
+}
diff --git a/src/main/fileSyncBridge.ts b/src/main/fileSyncBridge.ts
new file mode 100644
index 0000000..e0529cb
--- /dev/null
+++ b/src/main/fileSyncBridge.ts
@@ -0,0 +1,364 @@
+// Bidirectional file sync bridge: temp dir ↔ Overleaf via OT
+import { join } from 'path'
+import { readFile, writeFile, mkdir } from 'fs/promises'
+import { createHash } from 'crypto'
+import * as chokidar from 'chokidar'
+import { diff_match_patch } from 'diff-match-patch'
+import type { BrowserWindow } from 'electron'
+import type { OverleafSocket } from './overleafSocket'
+import { OtClient } from './otClient'
+import type { OtOp } from './otTypes'
+import { isInsert, isDelete } from './otTypes'
+
+const dmp = new diff_match_patch()
+
/**
 * Bidirectional sync between a local temp directory and an Overleaf project.
 *
 * Ownership model: each doc is either "editor-owned" (open in the renderer,
 * which runs its own OtClient; the bridge only mirrors content to disk) or
 * "bridge-owned" (the bridge runs an OtClient and translates disk edits into
 * OT ops and remote ops back into disk writes).
 *
 * Feedback-loop protection is layered:
 *   Layer 1 — writesInProgress guard suppresses watcher events for files the
 *             bridge itself just wrote;
 *   Layer 2 — content-equality check in processChange;
 *   Layer 3 — 300ms per-file debounce of watcher events.
 */
export class FileSyncBridge {
  private lastKnownContent = new Map<string, string>() // relPath → content
  private writesInProgress = new Set<string>() // relPaths being written by bridge
  private debounceTimers = new Map<string, ReturnType<typeof setTimeout>>()
  private otClients = new Map<string, OtClient>() // docId → OtClient (non-editor docs)
  private editorDocs = new Set<string>() // docIds owned by renderer
  private watcher: chokidar.FSWatcher | null = null

  private socket: OverleafSocket
  private tmpDir: string
  private docPathMap: Record<string, string> // docId → relPath
  private pathDocMap: Record<string, string> // relPath → docId
  private mainWindow: BrowserWindow

  private serverEventHandler: ((name: string, args: unknown[]) => void) | null = null
  private stopped = false

  constructor(
    socket: OverleafSocket,
    tmpDir: string,
    docPathMap: Record<string, string>,
    pathDocMap: Record<string, string>,
    mainWindow: BrowserWindow
  ) {
    this.socket = socket
    this.tmpDir = tmpDir
    this.docPathMap = docPathMap
    this.pathDocMap = pathDocMap
    this.mainWindow = mainWindow
  }

  /**
   * Join every known doc, snapshot its content to disk, create a
   * bridge-owned OtClient per doc, subscribe to server OT events, and
   * start watching the temp dir for local edits.
   */
  async start(): Promise<void> {
    // Join ALL docs, fetch content, write to disk
    await mkdir(this.tmpDir, { recursive: true })

    const docIds = Object.keys(this.docPathMap)
    for (const docId of docIds) {
      const relPath = this.docPathMap[docId]
      try {
        const result = await this.socket.joinDoc(docId)
        const content = (result.docLines || []).join('\n')
        this.lastKnownContent.set(relPath, content)

        // Create OtClient for this doc (bridge owns it initially)
        const otClient = new OtClient(
          result.version,
          (ops, version) => this.sendOps(docId, ops, version),
          (ops) => this.onRemoteApply(docId, ops)
        )
        this.otClients.set(docId, otClient)

        // Write to disk
        await this.writeToDisk(relPath, content)
      } catch (e) {
        // Best-effort: a doc that fails to join is simply not synced.
        console.log(`[FileSyncBridge] failed to join doc ${relPath}:`, e)
      }
    }

    // Listen for server events (remote ops on non-editor docs)
    this.serverEventHandler = (name: string, args: unknown[]) => {
      if (name === 'otUpdateApplied') {
        const update = args[0] as { doc?: string; op?: OtOp[]; v?: number } | undefined
        if (!update?.doc) return
        const docId = update.doc

        // For non-editor docs, process remote ops through bridge's OtClient
        if (!this.editorDocs.has(docId) && update.op && update.v !== undefined) {
          const otClient = this.otClients.get(docId)
          if (otClient) {
            otClient.onRemoteOps(update.op, update.v)
          }
        }

        // For non-editor docs, handle ack (op with no ops array = ack for our own op)
        if (!this.editorDocs.has(docId) && !update.op) {
          const otClient = this.otClients.get(docId)
          if (otClient) {
            otClient.onAck()
          }
        }
      }
    }
    this.socket.on('serverEvent', this.serverEventHandler)

    // Start watching the temp dir
    this.watcher = chokidar.watch(this.tmpDir, {
      ignoreInitial: true,
      // Wait for files to stop growing before reporting, so half-written
      // saves from external tools aren't synced mid-write.
      awaitWriteFinish: { stabilityThreshold: 100, pollInterval: 50 },
      ignored: [
        /(^|[/\\])\../, // dotfiles
        /\.(aux|log|pdf|fls|fdb_latexmk|synctex\.gz|bbl|blg|out|toc|lof|lot|nav|snm|vrb)$/ // LaTeX output files
      ]
    })

    this.watcher.on('change', (absPath: string) => {
      const relPath = absPath.replace(this.tmpDir + '/', '')
      this.onFileChanged(relPath)
    })

    this.watcher.on('add', (absPath: string) => {
      const relPath = absPath.replace(this.tmpDir + '/', '')
      // Only process if it's a known doc
      if (this.pathDocMap[relPath]) {
        this.onFileChanged(relPath)
      }
    })

    console.log(`[FileSyncBridge] started, watching ${this.tmpDir}, ${docIds.length} docs synced`)
  }

  /**
   * Tear down: cancel pending debounces, detach the server-event listener,
   * close the watcher, and drop all per-doc state. Pending debounced disk
   * changes are intentionally discarded, not flushed.
   */
  async stop(): Promise<void> {
    this.stopped = true

    // Clear all debounce timers
    for (const timer of this.debounceTimers.values()) {
      clearTimeout(timer)
    }
    this.debounceTimers.clear()

    // Remove server event handler
    if (this.serverEventHandler) {
      this.socket.removeListener('serverEvent', this.serverEventHandler)
      this.serverEventHandler = null
    }

    // Close watcher
    if (this.watcher) {
      await this.watcher.close()
      this.watcher = null
    }

    this.otClients.clear()
    this.lastKnownContent.clear()
    this.writesInProgress.clear()
    this.editorDocs.clear()

    console.log('[FileSyncBridge] stopped')
  }

  // ── Disk change handler ──────────────────────────────────────

  /** Watcher callback: filter self-writes and debounce before processing. */
  private onFileChanged(relPath: string): void {
    if (this.stopped) return

    // Layer 1: Skip if bridge is currently writing this file
    if (this.writesInProgress.has(relPath)) return

    // Layer 3: Debounce 300ms per file
    const existing = this.debounceTimers.get(relPath)
    if (existing) clearTimeout(existing)

    this.debounceTimers.set(relPath, setTimeout(() => {
      this.debounceTimers.delete(relPath)
      this.processChange(relPath)
    }, 300))
  }

  /**
   * Handle a (debounced) disk change: either forward the new content to the
   * renderer (editor-owned doc) or diff it against the last known content
   * and push the resulting OT ops to Overleaf (bridge-owned doc).
   */
  private async processChange(relPath: string): Promise<void> {
    if (this.stopped) return

    const docId = this.pathDocMap[relPath]
    if (!docId) return

    let newContent: string
    try {
      newContent = await readFile(join(this.tmpDir, relPath), 'utf-8')
    } catch {
      return // file deleted or unreadable
    }

    const lastKnown = this.lastKnownContent.get(relPath)

    // Layer 2: Content equality check
    if (newContent === lastKnown) return

    console.log(`[FileSyncBridge] disk change detected: ${relPath} (${(newContent.length)} chars)`)

    if (this.editorDocs.has(docId)) {
      // Doc is open in editor → send to renderer via IPC
      this.lastKnownContent.set(relPath, newContent)
      this.mainWindow.webContents.send('sync:externalEdit', { docId, content: newContent })
    } else {
      // Doc NOT open in editor → bridge handles OT directly
      const oldContent = lastKnown ?? ''
      this.lastKnownContent.set(relPath, newContent)

      const diffs = dmp.diff_main(oldContent, newContent)
      dmp.diff_cleanupEfficiency(diffs)
      const ops = diffsToOtOps(diffs)

      if (ops.length > 0) {
        const otClient = this.otClients.get(docId)
        if (otClient) {
          otClient.onLocalOps(ops)
        }
      }
    }
  }

  // ── Send OT ops to Overleaf (for non-editor docs) ───────────

  // Callback given to bridge-owned OtClients; the sha1 of the expected
  // post-op content lets the server detect divergence.
  private sendOps(docId: string, ops: OtOp[], version: number): void {
    const relPath = this.docPathMap[docId]
    const content = relPath ? this.lastKnownContent.get(relPath) ?? '' : ''
    const hash = createHash('sha1').update(content).digest('hex')
    this.socket.applyOtUpdate(docId, ops, version, hash)
  }

  // ── Apply remote ops (for non-editor docs) ──────────────────

  // Callback given to bridge-owned OtClients when remote ops arrive:
  // apply them to the cached content and mirror the result to disk.
  private onRemoteApply(docId: string, ops: OtOp[]): void {
    const relPath = this.docPathMap[docId]
    if (!relPath) return

    const currentContent = this.lastKnownContent.get(relPath) ?? ''
    const newContent = applyOpsToText(currentContent, ops)
    this.lastKnownContent.set(relPath, newContent)
    this.writeToDisk(relPath, newContent)
  }

  // ── Called by main process when editor/remote changes content ─

  /** Called when renderer notifies bridge that editor content changed */
  onEditorContentChanged(docId: string, content: string): void {
    const relPath = this.docPathMap[docId]
    if (!relPath) return

    // Update last known content
    this.lastKnownContent.set(relPath, content)

    // Write to disk so external tools can see the change
    this.writeToDisk(relPath, content)
  }

  // ── Editor doc tracking ──────────────────────────────────────

  /** Renderer opened this doc in the editor — bridge stops owning OT */
  addEditorDoc(docId: string): void {
    this.editorDocs.add(docId)
    // Bridge's OtClient for this doc is no longer used (renderer has its own)
    // But we keep the doc joined in the socket
  }

  /** Renderer closed this doc from the editor — bridge takes over OT */
  removeEditorDoc(docId: string): void {
    this.editorDocs.delete(docId)

    // Re-join the doc to get fresh version, since renderer's OtClient was tracking it
    const relPath = this.docPathMap[docId]
    if (!relPath) return

    this.socket.joinDoc(docId).then((result) => {
      const content = (result.docLines || []).join('\n')
      this.lastKnownContent.set(relPath, content)

      // Create fresh OtClient with current version
      const otClient = new OtClient(
        result.version,
        (ops, version) => this.sendOps(docId, ops, version),
        (ops) => this.onRemoteApply(docId, ops)
      )
      this.otClients.set(docId, otClient)

      // Write latest content to disk
      this.writeToDisk(relPath, content)
    }).catch((e) => {
      console.log(`[FileSyncBridge] failed to re-join doc ${relPath}:`, e)
    })
  }

  // ── Helpers ──────────────────────────────────────────────────

  // Write content while suppressing the watcher event our own write causes.
  // NOTE(review): the 150ms guard window is heuristic; a very slow disk or a
  // watcher event delayed past it could still echo back — confirm acceptable.
  private async writeToDisk(relPath: string, content: string): Promise<void> {
    const fullPath = join(this.tmpDir, relPath)
    const dir = fullPath.substring(0, fullPath.lastIndexOf('/'))

    // Set write guard
    this.writesInProgress.add(relPath)

    try {
      await mkdir(dir, { recursive: true })
      await writeFile(fullPath, content, 'utf-8')
    } catch (e) {
      console.log(`[FileSyncBridge] write error for ${relPath}:`, e)
    }

    // Clear write guard after 150ms (chokidar needs time to fire & be ignored)
    setTimeout(() => {
      this.writesInProgress.delete(relPath)
    }, 150)
  }

  /** Get the temp dir path */
  get dir(): string {
    return this.tmpDir
  }

  /** Get content for a doc (used by compilation manager) */
  getDocContent(relPath: string): string | undefined {
    return this.lastKnownContent.get(relPath)
  }

  /** Check if a doc's content is known */
  hasDoc(relPath: string): boolean {
    return this.lastKnownContent.has(relPath)
  }
}
+
+// ── Utility functions ────────────────────────────────────────
+
+/** Convert diff-match-patch diffs to OT ops */
+function diffsToOtOps(diffs: [number, string][]): OtOp[] {
+ const ops: OtOp[] = []
+ let pos = 0
+
+ for (const [type, text] of diffs) {
+ switch (type) {
+ case 0: // DIFF_EQUAL
+ pos += text.length
+ break
+ case 1: // DIFF_INSERT
+ ops.push({ i: text, p: pos })
+ pos += text.length
+ break
+ case -1: // DIFF_DELETE
+ ops.push({ d: text, p: pos })
+ // Don't advance pos — deletion doesn't move cursor forward
+ break
+ }
+ }
+
+ return ops
+}
+
+/** Apply OT ops to a text string */
+function applyOpsToText(text: string, ops: OtOp[]): string {
+ // Sort ops by position descending so we can apply without position shifting
+ const sortedOps = [...ops].sort((a, b) => b.p - a.p)
+
+ for (const op of sortedOps) {
+ if (isInsert(op)) {
+ text = text.slice(0, op.p) + op.i + text.slice(op.p)
+ } else if (isDelete(op)) {
+ text = text.slice(0, op.p) + text.slice(op.p + op.d.length)
+ }
+ // Comment ops don't modify text
+ }
+
+ return text
+}
diff --git a/src/main/index.ts b/src/main/index.ts
index 0adbe79..21b6e43 100644
--- a/src/main/index.ts
+++ b/src/main/index.ts
@@ -1,14 +1,17 @@
import { app, BrowserWindow, ipcMain, dialog, shell, net } from 'electron'
-import { join, basename, extname, dirname } from 'path'
-import { readdir, readFile, writeFile, stat, mkdir, rename, unlink, rm } from 'fs/promises'
-import { spawn, type ChildProcess } from 'child_process'
-import { watch } from 'chokidar'
+import { join, basename } from 'path'
+import { readFile, writeFile } from 'fs/promises'
+import { spawn } from 'child_process'
import * as pty from 'node-pty'
+import { OverleafSocket, type RootFolder, type SubFolder, type JoinDocResult } from './overleafSocket'
+import { CompilationManager } from './compilationManager'
+import { FileSyncBridge } from './fileSyncBridge'
let mainWindow: BrowserWindow | null = null
let ptyInstance: pty.IPty | null = null
-let fileWatcher: ReturnType<typeof watch> | null = null
-let compileProcess: ChildProcess | null = null
+let overleafSock: OverleafSocket | null = null
+let compilationManager: CompilationManager | null = null
+let fileSyncBridge: FileSyncBridge | null = null
function createWindow(): void {
mainWindow = new BrowserWindow({
@@ -32,158 +35,15 @@ function createWindow(): void {
}
}
-// ── File System IPC ──────────────────────────────────────────────
-
-interface FileNode {
- name: string
- path: string
- isDir: boolean
- children?: FileNode[]
-}
-
-async function readDirRecursive(dirPath: string, depth = 0): Promise<FileNode[]> {
- if (depth > 5) return []
- const entries = await readdir(dirPath, { withFileTypes: true })
- const nodes: FileNode[] = []
-
- for (const entry of entries) {
- if (entry.name.startsWith('.') || entry.name === 'node_modules' || entry.name === 'out') continue
-
- const fullPath = join(dirPath, entry.name)
- if (entry.isDirectory()) {
- const children = await readDirRecursive(fullPath, depth + 1)
- nodes.push({ name: entry.name, path: fullPath, isDir: true, children })
- } else {
- const ext = extname(entry.name).toLowerCase()
- if (['.tex', '.bib', '.cls', '.sty', '.bst', '.txt', '.md', '.log', '.aux', '.pdf', '.png', '.jpg', '.jpeg', '.svg'].includes(ext)) {
- nodes.push({ name: entry.name, path: fullPath, isDir: false })
- }
- }
- }
-
- return nodes.sort((a, b) => {
- if (a.isDir && !b.isDir) return -1
- if (!a.isDir && b.isDir) return 1
- return a.name.localeCompare(b.name)
- })
-}
-
-ipcMain.handle('dialog:openProject', async () => {
- const result = await dialog.showOpenDialog(mainWindow!, {
- properties: ['openDirectory'],
- title: 'Open LaTeX Project'
- })
- if (result.canceled) return null
- return result.filePaths[0]
-})
-
-ipcMain.handle('dialog:selectSaveDir', async () => {
- const result = await dialog.showOpenDialog(mainWindow!, {
- properties: ['openDirectory', 'createDirectory'],
- title: 'Choose where to clone the project'
- })
- if (result.canceled) return null
- return result.filePaths[0]
-})
-
-ipcMain.handle('fs:readDir', async (_e, dirPath: string) => {
- return readDirRecursive(dirPath)
-})
-
ipcMain.handle('fs:readFile', async (_e, filePath: string) => {
return readFile(filePath, 'utf-8')
})
-// Find the main .tex file (contains \documentclass) in a project
-ipcMain.handle('fs:findMainTex', async (_e, dirPath: string) => {
- async function search(dir: string, depth: number): Promise<string | null> {
- if (depth > 3) return null
- const entries = await readdir(dir, { withFileTypes: true })
- const texFiles: string[] = []
- const dirs: string[] = []
- for (const entry of entries) {
- if (entry.name.startsWith('.') || entry.name === 'node_modules' || entry.name === 'out') continue
- const full = join(dir, entry.name)
- if (entry.isDirectory()) dirs.push(full)
- else if (entry.name.endsWith('.tex')) texFiles.push(full)
- }
- for (const f of texFiles) {
- try {
- const content = await readFile(f, 'utf-8')
- if (/\\documentclass/.test(content)) return f
- } catch { /* skip */ }
- }
- for (const d of dirs) {
- const found = await search(d, depth + 1)
- if (found) return found
- }
- return null
- }
- return search(dirPath, 0)
-})
-
ipcMain.handle('fs:readBinary', async (_e, filePath: string) => {
const buffer = await readFile(filePath)
return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength)
})
-ipcMain.handle('fs:writeFile', async (_e, filePath: string, content: string) => {
- await writeFile(filePath, content, 'utf-8')
-})
-
-ipcMain.handle('fs:createFile', async (_e, dirPath: string, fileName: string) => {
- const fullPath = join(dirPath, fileName)
- await writeFile(fullPath, '', 'utf-8')
- return fullPath
-})
-
-ipcMain.handle('fs:createDir', async (_e, dirPath: string, dirName: string) => {
- const fullPath = join(dirPath, dirName)
- await mkdir(fullPath, { recursive: true })
- return fullPath
-})
-
-ipcMain.handle('fs:rename', async (_e, oldPath: string, newPath: string) => {
- await rename(oldPath, newPath)
-})
-
-ipcMain.handle('fs:delete', async (_e, filePath: string) => {
- const s = await stat(filePath)
- if (s.isDirectory()) {
- await rm(filePath, { recursive: true })
- } else {
- await unlink(filePath)
- }
-})
-
-ipcMain.handle('fs:stat', async (_e, filePath: string) => {
- const s = await stat(filePath)
- return { isDir: s.isDirectory(), size: s.size, mtime: s.mtimeMs }
-})
-
-// ── File Watcher ─────────────────────────────────────────────────
-
-ipcMain.handle('watcher:start', async (_e, dirPath: string) => {
- if (fileWatcher) {
- await fileWatcher.close()
- }
- fileWatcher = watch(dirPath, {
- ignored: /(^|[/\\])(\.|node_modules|out|\.aux|\.log|\.fls|\.fdb_latexmk|\.synctex)/,
- persistent: true,
- depth: 5
- })
- fileWatcher.on('all', (event, path) => {
- mainWindow?.webContents.send('watcher:change', { event, path })
- })
-})
-
-ipcMain.handle('watcher:stop', async () => {
- if (fileWatcher) {
- await fileWatcher.close()
- fileWatcher = null
- }
-})
-
// ── LaTeX Compilation ────────────────────────────────────────────
// Ensure TeX binaries are in PATH (Electron launched from Finder may miss them)
@@ -195,113 +55,6 @@ for (const p of texPaths) {
}
}
-// Parse missing packages from compile log
-function parseMissingPackages(log: string): string[] {
- const missing = new Set<string>()
- // Match "File `xxx.sty' not found"
- const styRegex = /File `([^']+\.sty)' not found/g
- let m: RegExpExecArray | null
- while ((m = styRegex.exec(log)) !== null) {
- missing.add(m[1].replace(/\.sty$/, ''))
- }
- // Match "Metric (TFM) file not found" for fonts
- const tfmRegex = /Font [^=]+=(\w+) .* not loadable: Metric/g
- while ((m = tfmRegex.exec(log)) !== null) {
- missing.add(m[1])
- }
- return [...missing]
-}
-
-// Find which tlmgr packages provide the missing files
-async function findTlmgrPackages(names: string[]): Promise<string[]> {
- const packages = new Set<string>()
- for (const name of names) {
- const result = await new Promise<string>((resolve) => {
- let out = ''
- const proc = spawn('tlmgr', ['search', '--file', `${name}.sty`], { env: process.env })
- proc.stdout?.on('data', (d) => { out += d.toString() })
- proc.stderr?.on('data', (d) => { out += d.toString() })
- proc.on('close', () => resolve(out))
- proc.on('error', () => resolve(''))
- })
- // tlmgr search output: "package_name:\n texmf-dist/..."
- const pkgMatch = result.match(/^(\S+):$/m)
- if (pkgMatch) {
- packages.add(pkgMatch[1])
- } else {
- // Fallback: use the name itself as package name
- packages.add(name)
- }
- }
- return [...packages]
-}
-
-ipcMain.handle('latex:compile', async (_e, filePath: string) => {
- if (compileProcess) {
- compileProcess.kill()
- }
-
- const dir = dirname(filePath)
- const file = basename(filePath)
-
- return new Promise<{ success: boolean; log: string; missingPackages?: string[] }>((resolve) => {
- let log = ''
- compileProcess = spawn('latexmk', ['-pdf', '-f', '-g', '-bibtex', '-synctex=1', '-interaction=nonstopmode', '-file-line-error', file], {
- cwd: dir,
- env: process.env
- })
-
- compileProcess.stdout?.on('data', (data) => {
- log += data.toString()
- mainWindow?.webContents.send('latex:log', data.toString())
- })
- compileProcess.stderr?.on('data', (data) => {
- log += data.toString()
- mainWindow?.webContents.send('latex:log', data.toString())
- })
- compileProcess.on('close', async (code) => {
- compileProcess = null
- if (code !== 0) {
- const missing = parseMissingPackages(log)
- if (missing.length > 0) {
- const packages = await findTlmgrPackages(missing)
- resolve({ success: false, log, missingPackages: packages })
- return
- }
- }
- resolve({ success: code === 0, log })
- })
- compileProcess.on('error', (err) => {
- compileProcess = null
- resolve({ success: false, log: err.message })
- })
- })
-})
-
-// Install TeX packages via tlmgr (runs in PTY so sudo can prompt for password)
-ipcMain.handle('latex:installPackages', async (_e, packages: string[]) => {
- if (!packages.length) return { success: false, message: 'No packages specified' }
-
- // Try without sudo first
- const tryDirect = await new Promise<{ success: boolean; message: string }>((resolve) => {
- let out = ''
- const proc = spawn('tlmgr', ['install', ...packages], { env: process.env })
- proc.stdout?.on('data', (d) => { out += d.toString() })
- proc.stderr?.on('data', (d) => { out += d.toString() })
- proc.on('close', (code) => resolve({ success: code === 0, message: out }))
- proc.on('error', (err) => resolve({ success: false, message: err.message }))
- })
-
- if (tryDirect.success) return tryDirect
-
- // Need sudo — run in PTY terminal so user can enter password
- return { success: false, message: 'need_sudo', packages }
-})
-
-ipcMain.handle('latex:getPdfPath', async (_e, texPath: string) => {
- return texPath.replace(/\.tex$/, '.pdf')
-})
-
// SyncTeX: PDF position → source file:line (inverse search)
ipcMain.handle('synctex:editFromPdf', async (_e, pdfPath: string, page: number, x: number, y: number) => {
return new Promise<{ file: string; line: number } | null>((resolve) => {
@@ -326,33 +79,6 @@ ipcMain.handle('synctex:editFromPdf', async (_e, pdfPath: string, page: number,
})
})
-// SyncTeX: source file:line → PDF page + position (forward search)
-ipcMain.handle('synctex:viewFromSource', async (_e, texPath: string, line: number, pdfPath: string) => {
- return new Promise<{ page: number; x: number; y: number } | null>((resolve) => {
- const proc = spawn('synctex', ['view', '-i', `${line}:0:${texPath}`, '-o', pdfPath], {
- env: process.env
- })
- let out = ''
- proc.stdout?.on('data', (d) => { out += d.toString() })
- proc.stderr?.on('data', (d) => { out += d.toString() })
- proc.on('close', () => {
- const pageMatch = out.match(/Page:(\d+)/)
- const xMatch = out.match(/x:([0-9.]+)/)
- const yMatch = out.match(/y:([0-9.]+)/)
- if (pageMatch) {
- resolve({
- page: parseInt(pageMatch[1]),
- x: xMatch ? parseFloat(xMatch[1]) : 0,
- y: yMatch ? parseFloat(yMatch[1]) : 0
- })
- } else {
- resolve(null)
- }
- })
- proc.on('error', () => resolve(null))
- })
-})
-
// ── Terminal / PTY ───────────────────────────────────────────────
ipcMain.handle('pty:spawn', async (_e, cwd: string) => {
@@ -393,161 +119,844 @@ ipcMain.handle('pty:kill', async () => {
ptyInstance = null
})
-// ── Overleaf / Git Sync ──────────────────────────────────────────
+// ── Overleaf Web Session (for comments) ─────────────────────────
+
+let overleafSessionCookie = ''
+let overleafCsrfToken = ''
+
+// Persist cookie to disk
+const cookiePath = join(app.getPath('userData'), 'overleaf-session.json')
+
+async function saveOverleafSession(): Promise<void> {
+ try {
+ await writeFile(cookiePath, JSON.stringify({ cookie: overleafSessionCookie, csrf: overleafCsrfToken }))
+ } catch { /* ignore */ }
+}
+
+let sessionLoadPromise: Promise<void> | null = null
-// Helper: run git with explicit credentials via a temp credential helper script
-function gitWithCreds(args: string[], email: string, password: string, cwd?: string): Promise<{ success: boolean; message: string }> {
+async function loadOverleafSession(): Promise<void> {
+ try {
+ const raw = await readFile(cookiePath, 'utf-8')
+ const data = JSON.parse(raw)
+ if (data.cookie) {
+ overleafSessionCookie = data.cookie
+ overleafCsrfToken = data.csrf || ''
+ console.log('[overleaf] loaded saved session, verifying...')
+ // Verify it's still valid
+ const result = await overleafFetch('/user/projects')
+ if (!result.ok) {
+ console.log('[overleaf] saved session expired (status:', result.status, ')')
+ overleafSessionCookie = ''
+ overleafCsrfToken = ''
+ } else {
+ console.log('[overleaf] saved session is valid')
+ }
+ }
+ } catch { /* no saved session */ }
+}
+
+// Helper: make authenticated request to Overleaf web API
+async function overleafFetch(path: string, options: { method?: string; body?: string; raw?: boolean; cookie?: string } = {}): Promise<{ ok: boolean; status: number; data: unknown; setCookies: string[] }> {
return new Promise((resolve) => {
- // Use inline credential helper that echoes stored creds
- const helper = `!f() { echo "username=${email}"; echo "password=${password}"; }; f`
- const fullArgs = ['-c', `credential.helper=${helper}`, ...args]
- console.log('[git]', args[0], args.slice(1).join(' ').replace(password, '***'))
- const proc = spawn('git', fullArgs, {
- cwd,
- env: { ...process.env, GIT_TERMINAL_PROMPT: '0' }
- })
- let output = ''
- proc.stdout?.on('data', (d) => { output += d.toString() })
- proc.stderr?.on('data', (d) => { output += d.toString() })
- proc.on('close', (code) => {
- console.log('[git] exit code:', code, 'output:', output.slice(0, 300))
- resolve({ success: code === 0, message: output })
+ const url = `https://www.overleaf.com${path}`
+ const request = net.request({ url, method: options.method || 'GET' })
+ request.setHeader('Cookie', options.cookie || overleafSessionCookie)
+ request.setHeader('User-Agent', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.6723.191 Safari/537.36')
+ if (!options.raw) {
+ request.setHeader('Accept', 'application/json')
+ }
+ if (options.body) {
+ request.setHeader('Content-Type', options.raw ? 'text/plain; charset=UTF-8' : 'application/json')
+ }
+ if (overleafCsrfToken && options.method && options.method !== 'GET') {
+ request.setHeader('x-csrf-token', overleafCsrfToken)
+ }
+
+ let body = ''
+ request.on('response', (response) => {
+ const sc = response.headers['set-cookie']
+ const setCookies = Array.isArray(sc) ? sc : sc ? [sc] : []
+ response.on('data', (chunk) => { body += chunk.toString() })
+ response.on('end', () => {
+ let data: unknown = body
+ if (!options.raw) {
+ try { data = JSON.parse(body) } catch { /* not json */ }
+ }
+ resolve({ ok: response.statusCode >= 200 && response.statusCode < 300, status: response.statusCode, data, setCookies })
+ })
})
- proc.on('error', (err) => {
- console.log('[git] error:', err.message)
- resolve({ success: false, message: err.message })
+ request.on('error', (err) => {
+ resolve({ ok: false, status: 0, data: err.message, setCookies: [] })
})
+
+ if (options.body) request.write(options.body)
+ request.end()
})
}
-// Helper: run git with osxkeychain (for after credentials are stored)
-function gitSpawn(args: string[], cwd?: string): Promise<{ success: boolean; message: string }> {
- return new Promise((resolve) => {
- const fullArgs = ['-c', 'credential.helper=osxkeychain', ...args]
- const proc = spawn('git', fullArgs, {
- cwd,
- env: { ...process.env, GIT_TERMINAL_PROMPT: '0' }
+// Login via webview — opens Overleaf login page, captures session cookie
+ipcMain.handle('overleaf:webLogin', async () => {
+ return new Promise<{ success: boolean }>((resolve) => {
+ const loginWindow = new BrowserWindow({
+ width: 900,
+ height: 750,
+ parent: mainWindow!,
+ modal: true,
+ webPreferences: { nodeIntegration: false, contextIsolation: true }
})
- let output = ''
- proc.stdout?.on('data', (d) => { output += d.toString() })
- proc.stderr?.on('data', (d) => { output += d.toString() })
- proc.on('close', (code) => {
- resolve({ success: code === 0, message: output })
- })
- proc.on('error', (err) => {
- resolve({ success: false, message: err.message })
+
+ loginWindow.loadURL('https://www.overleaf.com/login')
+
+ // Inject a floating back button when navigated away from overleaf.com
+ const injectBackButton = () => {
+ loginWindow.webContents.executeJavaScript(`
+ if (!document.getElementById('claudetex-back-btn')) {
+ const btn = document.createElement('div');
+ btn.id = 'claudetex-back-btn';
+ btn.innerHTML = '← Back';
+ btn.style.cssText = 'position:fixed;top:8px;left:8px;z-index:999999;padding:6px 14px;' +
+ 'background:#333;color:#fff;border-radius:6px;cursor:pointer;font:13px -apple-system,sans-serif;' +
+ 'box-shadow:0 2px 8px rgba(0,0,0,.3);user-select:none;-webkit-app-region:no-drag;';
+ btn.addEventListener('click', () => history.back());
+ btn.addEventListener('mouseenter', () => btn.style.background = '#555');
+ btn.addEventListener('mouseleave', () => btn.style.background = '#333');
+ document.body.appendChild(btn);
+ }
+ `).catch(() => {})
+ }
+
+ loginWindow.webContents.on('did-finish-load', injectBackButton)
+ loginWindow.webContents.on('did-navigate-in-page', injectBackButton)
+
+ // Verify cookie by calling Overleaf API — only succeed if we get 200
+ const verifyAndCapture = async (): Promise<boolean> => {
+ const cookies = await loginWindow.webContents.session.cookies.get({ domain: '.overleaf.com' })
+ if (!cookies.find((c) => c.name === 'overleaf_session2')) return false
+
+ const testCookie = cookies.map((c) => `${c.name}=${c.value}`).join('; ')
+ // Test if this cookie is actually authenticated
+ const ok = await new Promise<boolean>((res) => {
+ const req = net.request({ url: 'https://www.overleaf.com/user/projects', method: 'GET' })
+ req.setHeader('Cookie', testCookie)
+ req.setHeader('Accept', 'application/json')
+ req.on('response', (resp) => {
+ resp.on('data', () => {})
+ resp.on('end', () => res(resp.statusCode === 200))
+ })
+ req.on('error', () => res(false))
+ req.end()
+ })
+
+ if (!ok) return false
+
+ overleafSessionCookie = testCookie
+ // Get CSRF from meta tag if we're on an Overleaf page
+ try {
+ const csrf = await loginWindow.webContents.executeJavaScript(
+ `document.querySelector('meta[name="ol-csrfToken"]')?.content || ''`
+ )
+ if (csrf) overleafCsrfToken = csrf
+ } catch { /* ignore */ }
+
+ // If no CSRF from page, fetch from /project page
+ if (!overleafCsrfToken) {
+ await new Promise<void>((res) => {
+ const req = net.request({ url: 'https://www.overleaf.com/project', method: 'GET' })
+ req.setHeader('Cookie', overleafSessionCookie)
+ let body = ''
+ req.on('response', (resp) => {
+ resp.on('data', (chunk) => { body += chunk.toString() })
+ resp.on('end', () => {
+ const m = body.match(/ol-csrfToken[^>]*content="([^"]+)"/)
+ if (m) overleafCsrfToken = m[1]
+ res()
+ })
+ })
+ req.on('error', () => res())
+ req.end()
+ })
+ }
+
+ return true
+ }
+
+ let resolved = false
+ const tryCapture = async () => {
+ if (resolved) return
+ const ok = await verifyAndCapture()
+ if (ok && !resolved) {
+ resolved = true
+ saveOverleafSession()
+ loginWindow.close()
+ resolve({ success: true })
+ }
+ }
+
+ loginWindow.webContents.on('did-navigate', () => { setTimeout(tryCapture, 2000) })
+ loginWindow.webContents.on('did-navigate-in-page', () => { setTimeout(tryCapture, 2000) })
+
+ loginWindow.on('closed', () => {
+ if (!overleafSessionCookie) resolve({ success: false })
})
})
-}
+})
-// Store credentials in macOS Keychain (no verification — that happens in overleaf:cloneWithAuth)
-function storeCredentials(email: string, password: string): Promise<boolean> {
- return new Promise((resolve) => {
- // Erase old first
- const erase = spawn('git', ['credential-osxkeychain', 'erase'])
- erase.stdin?.write(`protocol=https\nhost=git.overleaf.com\n\n`)
- erase.stdin?.end()
- erase.on('close', () => {
- const store = spawn('git', ['credential-osxkeychain', 'store'])
- store.stdin?.write(`protocol=https\nhost=git.overleaf.com\nusername=${email}\npassword=${password}\n\n`)
- store.stdin?.end()
- store.on('close', (code) => resolve(code === 0))
- })
+// Check if web session is active — wait for startup load to finish
+ipcMain.handle('overleaf:hasWebSession', async () => {
+ if (sessionLoadPromise) await sessionLoadPromise
+ return { loggedIn: !!overleafSessionCookie }
+})
+
+// Fetch all comment threads for a project
+ipcMain.handle('overleaf:getThreads', async (_e, projectId: string) => {
+ if (!overleafSessionCookie) return { success: false, message: 'not_logged_in' }
+ const result = await overleafFetch(`/project/${projectId}/threads`)
+ if (!result.ok) return { success: false, message: `HTTP ${result.status}` }
+ return { success: true, threads: result.data }
+})
+
+// Reply to a thread
+ipcMain.handle('overleaf:replyThread', async (_e, projectId: string, threadId: string, content: string) => {
+ if (!overleafSessionCookie) return { success: false }
+ const result = await overleafFetch(`/project/${projectId}/thread/${threadId}/messages`, {
+ method: 'POST',
+ body: JSON.stringify({ content })
+ })
+ return { success: result.ok, data: result.data }
+})
+
+// Resolve a thread
+ipcMain.handle('overleaf:resolveThread', async (_e, projectId: string, threadId: string) => {
+ if (!overleafSessionCookie) return { success: false }
+ const result = await overleafFetch(`/project/${projectId}/thread/${threadId}/resolve`, {
+ method: 'POST',
+ body: '{}'
})
+ return { success: result.ok }
+})
+
+// Reopen a thread
+ipcMain.handle('overleaf:reopenThread', async (_e, projectId: string, threadId: string) => {
+ if (!overleafSessionCookie) return { success: false }
+ const result = await overleafFetch(`/project/${projectId}/thread/${threadId}/reopen`, {
+ method: 'POST',
+ body: '{}'
+ })
+ return { success: result.ok }
+})
+
+// Delete a comment message
+ipcMain.handle('overleaf:deleteMessage', async (_e, projectId: string, threadId: string, messageId: string) => {
+ if (!overleafSessionCookie) return { success: false }
+ const result = await overleafFetch(`/project/${projectId}/thread/${threadId}/messages/${messageId}`, {
+ method: 'DELETE'
+ })
+ return { success: result.ok }
+})
+
+// Edit a comment message
+ipcMain.handle('overleaf:editMessage', async (_e, projectId: string, threadId: string, messageId: string, content: string) => {
+ if (!overleafSessionCookie) return { success: false }
+ const result = await overleafFetch(`/project/${projectId}/thread/${threadId}/messages/${messageId}/edit`, {
+ method: 'POST',
+ body: JSON.stringify({ content })
+ })
+ return { success: result.ok }
+})
+
+// Delete entire thread
+ipcMain.handle('overleaf:deleteThread', async (_e, projectId: string, docId: string, threadId: string) => {
+ if (!overleafSessionCookie) return { success: false }
+ const result = await overleafFetch(`/project/${projectId}/doc/${docId}/thread/${threadId}`, {
+ method: 'DELETE'
+ })
+ return { success: result.ok }
+})
+
+// Add a new comment: create thread via REST then submit op via Socket.IO
+async function addComment(
+ projectId: string,
+ docId: string,
+ pos: number,
+ text: string,
+ content: string
+): Promise<{ success: boolean; threadId?: string; message?: string }> {
+ if (!overleafSessionCookie) return { success: false, message: 'not_logged_in' }
+
+ // Generate a random threadId (24-char hex like Mongo ObjectId)
+ const threadId = Array.from({ length: 24 }, () => Math.floor(Math.random() * 16).toString(16)).join('')
+
+ // Step 1: Create the thread message via REST
+ const msgResult = await overleafFetch(`/project/${projectId}/thread/${threadId}/messages`, {
+ method: 'POST',
+ body: JSON.stringify({ content })
+ })
+ if (!msgResult.ok) return { success: false, message: `REST failed: ${msgResult.status}` }
+
+ // Step 2: Submit the comment op via Socket.IO WebSocket
+ const hsRes = await overleafFetch(`/socket.io/1/?t=${Date.now()}&projectId=${projectId}`, { raw: true })
+ if (!hsRes.ok) return { success: false, message: 'handshake failed' }
+ const sid = (hsRes.data as string).split(':')[0]
+ if (!sid) return { success: false, message: 'no sid' }
+
+ const { session: electronSession } = await import('electron')
+ const ses = electronSession.fromPartition('overleaf-sio-add-' + Date.now())
+
+ ses.webRequest.onHeadersReceived((details, callback) => {
+ const headers = { ...details.responseHeaders }
+ delete headers['set-cookie']
+ delete headers['Set-Cookie']
+ callback({ responseHeaders: headers })
+ })
+
+ const allCookieParts = overleafSessionCookie.split('; ')
+ for (const sc of hsRes.setCookies) {
+ allCookieParts.push(sc.split(';')[0])
+ }
+ for (const pair of allCookieParts) {
+ const eqIdx = pair.indexOf('=')
+ if (eqIdx < 0) continue
+ try {
+ await ses.cookies.set({
+ url: 'https://www.overleaf.com',
+ name: pair.substring(0, eqIdx),
+ value: pair.substring(eqIdx + 1),
+ domain: '.overleaf.com',
+ path: '/',
+ secure: true
+ })
+ } catch { /* ignore */ }
+ }
+
+ const win = new BrowserWindow({
+ width: 800, height: 600, show: false,
+ webPreferences: { nodeIntegration: false, contextIsolation: false, session: ses }
+ })
+
+ try {
+ win.webContents.on('console-message', (_e, _level, msg) => {
+ console.log('[overleaf-add-comment]', msg)
+ })
+ await win.loadURL('https://www.overleaf.com/login')
+
+ const script = `
+ new Promise(async (mainResolve) => {
+ try {
+ var ws = new WebSocket('wss://' + location.host + '/socket.io/1/websocket/${sid}');
+ var ackId = 0, ackCbs = {}, evtCbs = {};
+
+ ws.onmessage = function(e) {
+ var d = e.data;
+ if (d === '2::') { ws.send('2::'); return; }
+ if (d === '1::') return;
+ var am = d.match(/^6:::(\\d+)\\+([\\s\\S]*)/);
+ if (am) {
+ var cb = ackCbs[parseInt(am[1])];
+ if (cb) { delete ackCbs[parseInt(am[1])]; try { cb(JSON.parse(am[2])); } catch(e2) { cb(null); } }
+ return;
+ }
+ var em2 = d.match(/^5:::(\\{[\\s\\S]*\\})/);
+ if (em2) {
+ try {
+ var evt = JSON.parse(em2[1]);
+ var ecb = evtCbs[evt.name];
+ if (ecb) { delete evtCbs[evt.name]; ecb(evt.args); }
+ } catch(e3) {}
+ }
+ };
+
+ function emitAck(name, args) {
+ return new Promise(function(res) { ackId++; ackCbs[ackId] = res;
+ ws.send('5:' + ackId + '+::' + JSON.stringify({ name: name, args: args })); });
+ }
+ function waitEvent(name) {
+ return new Promise(function(res) { evtCbs[name] = res; });
+ }
+
+ ws.onerror = function() { mainResolve({ error: 'ws_error' }); };
+ ws.onclose = function(ev) { console.log('ws closed: ' + ev.code); };
+
+ ws.onopen = async function() {
+ try {
+ var jpPromise = waitEvent('joinProjectResponse');
+ ws.send('5:::' + JSON.stringify({ name: 'joinProject', args: [{ project_id: '${projectId}' }] }));
+ await jpPromise;
+
+ // Join the doc to submit the op
+ await emitAck('joinDoc', ['${docId}']);
+
+ // Submit the comment op
+ var commentOp = { c: ${JSON.stringify(text)}, p: ${pos}, t: '${threadId}' };
+ console.log('submitting op: ' + JSON.stringify(commentOp));
+ await emitAck('applyOtUpdate', ['${docId}', { doc: '${docId}', op: [commentOp], v: 0 }]);
+
+ await emitAck('leaveDoc', ['${docId}']);
+ ws.close();
+ mainResolve({ success: true });
+ } catch (e) { ws.close(); mainResolve({ error: e.message }); }
+ };
+ setTimeout(function() { ws.close(); mainResolve({ error: 'timeout' }); }, 30000);
+ } catch (e) { mainResolve({ error: e.message }); }
+ });
+ `
+
+ const result = await win.webContents.executeJavaScript(script)
+ console.log('[overleaf] addComment result:', result)
+
+ if (result?.error) return { success: false, message: result.error }
+ return { success: true, threadId }
+ } catch (e) {
+ console.log('[overleaf] addComment error:', e)
+ return { success: false, message: String(e) }
+ } finally {
+ win.close()
+ }
+}
+
+ipcMain.handle('overleaf:addComment', async (_e, projectId: string, docId: string, pos: number, text: string, content: string) => {
+ return addComment(projectId, docId, pos, text, content)
+})
+
+// ── OT / Socket Mode IPC ─────────────────────────────────────────
+
+interface SocketFileNode {
+ name: string
+ path: string
+ isDir: boolean
+ children?: SocketFileNode[]
+ docId?: string
+ fileRefId?: string
+ folderId?: string
}
-// Verify credentials + project access using git ls-remote, then clone
-// Overleaf git auth: username is always literal "git", password is the token
-ipcMain.handle('overleaf:cloneWithAuth', async (_e, projectId: string, dest: string, token: string, remember: boolean) => {
- const repoUrl = `https://git.overleaf.com/${projectId}`
- console.log('[overleaf:cloneWithAuth] Verifying access to:', projectId)
-
- // Step 1: ls-remote to verify both auth and project access
- // Username must be "git" (not email), password is the olp_ token
- const verify = await gitWithCreds(['ls-remote', '--heads', repoUrl], 'git', token)
-
- if (!verify.success) {
- const msg = verify.message
- console.log('[overleaf:cloneWithAuth] ls-remote failed:', msg)
- if (msg.includes('only supports Git authentication tokens') || msg.includes('token')) {
- return { success: false, message: 'need_token', detail: 'Overleaf requires a Git Authentication Token (not your password).\n\n1. Go to Overleaf → Account Settings\n2. Find "Git Integration"\n3. Generate a token and paste it here.' }
+function walkRootFolder(folders: RootFolder[]): {
+ files: SocketFileNode[]
+ docPathMap: Record<string, string>
+ pathDocMap: Record<string, string>
+ fileRefs: Array<{ id: string; path: string }>
+ rootFolderId: string
+} {
+ const docPathMap: Record<string, string> = {}
+ const pathDocMap: Record<string, string> = {}
+ const fileRefs: Array<{ id: string; path: string }> = []
+
+ function walkFolder(f: SubFolder | RootFolder, prefix: string): SocketFileNode[] {
+ const nodes: SocketFileNode[] = []
+
+ for (const doc of f.docs || []) {
+ const relPath = prefix + doc.name
+ docPathMap[doc._id] = relPath
+ pathDocMap[relPath] = doc._id
+ nodes.push({
+ name: doc.name,
+ path: relPath,
+ isDir: false,
+ docId: doc._id
+ })
}
- if (msg.includes('Authentication failed') || msg.includes('401') || msg.includes('403') || msg.includes('could not read')) {
- return { success: false, message: 'auth_failed', detail: 'Authentication failed. Make sure you are using a Git Authentication Token, not your Overleaf password.' }
+
+ for (const ref of f.fileRefs || []) {
+ const relPath = prefix + ref.name
+ fileRefs.push({ id: ref._id, path: relPath })
+ nodes.push({
+ name: ref.name,
+ path: relPath,
+ isDir: false,
+ fileRefId: ref._id
+ })
}
- if (msg.includes('not found') || msg.includes('does not appear to be a git repository')) {
- return { success: false, message: 'not_found', detail: 'Project not found. Check the URL and ensure you have access.' }
+
+ for (const sub of f.folders || []) {
+ const relPath = prefix + sub.name + '/'
+ const children = walkFolder(sub, relPath)
+ nodes.push({
+ name: sub.name,
+ path: relPath,
+ isDir: true,
+ children,
+ folderId: sub._id
+ })
}
- return { success: false, message: 'error', detail: msg }
- }
- console.log('[overleaf:cloneWithAuth] Auth verified. Storing credentials and cloning...')
+ return nodes
+ }
- // Step 2: Credentials work — store in keychain if requested
- if (remember) {
- await storeCredentials('git', token)
- console.log('[overleaf:cloneWithAuth] Token saved to Keychain')
+ const files: SocketFileNode[] = []
+ const rootFolderId = folders[0]?._id || ''
+ for (const root of folders) {
+ files.push(...walkFolder(root, ''))
}
- // Step 3: Clone using keychain credentials
- const result = await gitSpawn(['clone', repoUrl, dest])
- if (result.success) {
- return { success: true, message: 'ok', detail: '' }
- } else {
- return { success: false, message: 'clone_failed', detail: result.message }
+ return { files, docPathMap, pathDocMap, fileRefs, rootFolderId }
+}
+
+ipcMain.handle('ot:connect', async (_e, projectId: string) => {
+ if (!overleafSessionCookie) return { success: false, message: 'not_logged_in' }
+
+ try {
+ overleafSock = new OverleafSocket()
+
+ // Relay events to renderer
+ overleafSock.on('connectionState', (state: string) => {
+ mainWindow?.webContents.send('ot:connectionState', state)
+ })
+
+ // otUpdateApplied: server acknowledges our op (ack signal for OT client)
+ overleafSock.on('serverEvent', (name: string, args: unknown[]) => {
+ if (name === 'otUpdateApplied') {
+ const update = args[0] as { doc?: string; v?: number } | undefined
+ if (update?.doc) {
+ mainWindow?.webContents.send('ot:ack', { docId: update.doc })
+ }
+ }
+ })
+
+ overleafSock.on('docRejoined', (docId: string, result: JoinDocResult) => {
+ mainWindow?.webContents.send('ot:docRejoined', {
+ docId,
+ content: result.docLines.join('\n'),
+ version: result.version
+ })
+ })
+
+ const projectResult = await overleafSock.connect(projectId, overleafSessionCookie)
+ const { files, docPathMap, pathDocMap, fileRefs, rootFolderId } = walkRootFolder(projectResult.project.rootFolder)
+
+ // Set up compilation manager
+ compilationManager = new CompilationManager(projectId, overleafSessionCookie)
+
+ // Set up file sync bridge for bidirectional sync
+ const tmpDir = compilationManager.dir
+ fileSyncBridge = new FileSyncBridge(overleafSock, tmpDir, docPathMap, pathDocMap, mainWindow!)
+ fileSyncBridge.start().catch((e) => {
+ console.log('[ot:connect] fileSyncBridge start error:', e)
+ })
+
+ return {
+ success: true,
+ files,
+ project: {
+ name: projectResult.project.name,
+ rootDocId: projectResult.project.rootDoc_id
+ },
+ docPathMap,
+ pathDocMap,
+ fileRefs,
+ rootFolderId
+ }
+ } catch (e) {
+ console.log('[ot:connect] error:', e)
+ return { success: false, message: String(e) }
}
})
-// Check if credentials exist in Keychain
-ipcMain.handle('overleaf:check', async () => {
- return new Promise<{ loggedIn: boolean; email: string }>((resolve) => {
- const proc = spawn('git', ['credential-osxkeychain', 'get'])
- let out = ''
- proc.stdout?.on('data', (d) => { out += d.toString() })
- proc.stdin?.write(`protocol=https\nhost=git.overleaf.com\n\n`)
- proc.stdin?.end()
- proc.on('close', (code) => {
- if (code === 0 && out.includes('username=')) {
- const match = out.match(/username=(.+)/)
- resolve({ loggedIn: true, email: match?.[1]?.trim() ?? '' })
- } else {
- resolve({ loggedIn: false, email: '' })
+ipcMain.handle('ot:disconnect', async () => {
+ await fileSyncBridge?.stop()
+ fileSyncBridge = null
+ overleafSock?.disconnect()
+ overleafSock = null
+ await compilationManager?.cleanup()
+ compilationManager = null
+})
+
+// Track per-doc event handlers for cleanup on leaveDoc
+const docEventHandlers = new Map<string, (name: string, args: unknown[]) => void>()
+
+ipcMain.handle('ot:joinDoc', async (_e, docId: string) => {
+ if (!overleafSock) return { success: false, message: 'not_connected' }
+
+ try {
+ const result = await overleafSock.joinDoc(docId)
+ const content = (result.docLines || []).join('\n')
+
+ // Update compilation manager with doc content
+ if (compilationManager && overleafSock.projectData) {
+ const { docPathMap } = walkRootFolder(overleafSock.projectData.project.rootFolder)
+ const relPath = docPathMap[docId]
+ if (relPath) {
+ compilationManager.setDocContent(relPath, content)
}
+ }
+
+ // Notify bridge that editor is taking over this doc
+ fileSyncBridge?.addEditorDoc(docId)
+
+ // Remove existing handler if rejoining
+ const existingHandler = docEventHandlers.get(docId)
+ if (existingHandler) overleafSock.removeListener('serverEvent', existingHandler)
+
+ // Set up relay for remote ops on this doc
+ const handler = (name: string, args: unknown[]) => {
+ if (name === 'otUpdateApplied') {
+ const update = args[0] as { doc?: string; op?: unknown[]; v?: number } | undefined
+ if (update?.doc === docId && update.op) {
+ mainWindow?.webContents.send('ot:remoteOp', {
+ docId: update.doc,
+ ops: update.op,
+ version: update.v
+ })
+ }
+ }
+ }
+ docEventHandlers.set(docId, handler)
+ overleafSock.on('serverEvent', handler)
+
+ return {
+ success: true,
+ content,
+ version: result.version,
+ ranges: result.ranges
+ }
+ } catch (e) {
+ console.log('[ot:joinDoc] error:', e)
+ return { success: false, message: String(e) }
+ }
+})
+
+ipcMain.handle('ot:leaveDoc', async (_e, docId: string) => {
+ if (!overleafSock) return
+ try {
+ // Remove event handler for this doc
+ const handler = docEventHandlers.get(docId)
+ if (handler) {
+ overleafSock.removeListener('serverEvent', handler)
+ docEventHandlers.delete(docId)
+ }
+ // Bridge takes back OT ownership — do NOT leaveDoc on the socket,
+ // the bridge keeps the doc joined for sync
+ fileSyncBridge?.removeEditorDoc(docId)
+ } catch (e) {
+ console.log('[ot:leaveDoc] error:', e)
+ }
+})
+
+ipcMain.handle('ot:sendOp', async (_e, docId: string, ops: unknown[], version: number, hash: string) => {
+ if (!overleafSock) return
+ try {
+ await overleafSock.applyOtUpdate(docId, ops, version, hash)
+ } catch (e) {
+ console.log('[ot:sendOp] error:', e)
+ }
+})
+
+// Renderer → bridge: editor content changed (for disk sync)
+ipcMain.handle('sync:contentChanged', async (_e, docId: string, content: string) => {
+ fileSyncBridge?.onEditorContentChanged(docId, content)
+})
+
+ipcMain.handle('overleaf:listProjects', async () => {
+ if (!overleafSessionCookie) return { success: false, message: 'not_logged_in' }
+
+ // POST /api/project returns full project data (lastUpdated, owner, etc.)
+ const result = await overleafFetch('/api/project', {
+ method: 'POST',
+ body: JSON.stringify({
+ filters: {},
+ page: { size: 200 },
+ sort: { by: 'lastUpdated', order: 'desc' }
+ })
+ })
+ if (!result.ok) return { success: false, message: `HTTP ${result.status}` }
+
+ const data = result.data as { totalSize?: number; projects?: unknown[] }
+ const projects = (data.projects || []) as Array<{
+ id?: string; _id?: string; name: string; lastUpdated: string
+ owner?: { firstName: string; lastName: string; email?: string }
+ lastUpdatedBy?: { firstName: string; lastName: string; email?: string } | null
+ accessLevel?: string
+ source?: string
+ }>
+
+ return {
+ success: true,
+ projects: projects.map((p) => ({
+ id: p.id || p._id || '',
+ name: p.name,
+ lastUpdated: p.lastUpdated,
+ owner: p.owner ? { firstName: p.owner.firstName, lastName: p.owner.lastName, email: p.owner.email } : undefined,
+ lastUpdatedBy: p.lastUpdatedBy ? { firstName: p.lastUpdatedBy.firstName, lastName: p.lastUpdatedBy.lastName } : null,
+ accessLevel: p.accessLevel || 'unknown',
+ source: p.source || ''
+ }))
+ }
+})
+
+ipcMain.handle('overleaf:createProject', async (_e, name: string) => {
+ if (!overleafSessionCookie) return { success: false, message: 'not_logged_in' }
+ const result = await overleafFetch('/api/project/new', {
+ method: 'POST',
+ body: JSON.stringify({ projectName: name })
+ })
+ if (!result.ok) return { success: false, message: `HTTP ${result.status}` }
+ const data = result.data as { project_id?: string; _id?: string }
+ return { success: true, projectId: data.project_id || data._id }
+})
+
+ipcMain.handle('overleaf:uploadProject', async () => {
+ if (!overleafSessionCookie) return { success: false, message: 'not_logged_in' }
+
+ const { canceled, filePaths } = await dialog.showOpenDialog({
+ title: 'Upload Project (.zip)',
+ filters: [{ name: 'ZIP Archives', extensions: ['zip'] }],
+ properties: ['openFile']
+ })
+ if (canceled || filePaths.length === 0) return { success: false, message: 'cancelled' }
+
+ const zipPath = filePaths[0]
+ const zipData = await readFile(zipPath)
+ const fileName = basename(zipPath)
+
+ // Multipart upload
+ const boundary = '----FormBoundary' + Math.random().toString(36).slice(2)
+ const header = `--${boundary}\r\nContent-Disposition: form-data; name="qqfile"; filename="${fileName}"\r\nContent-Type: application/zip\r\n\r\n`
+ const footer = `\r\n--${boundary}--\r\n`
+ const headerBuf = Buffer.from(header)
+ const footerBuf = Buffer.from(footer)
+ const body = Buffer.concat([headerBuf, zipData, footerBuf])
+
+ return new Promise((resolve) => {
+ const req = net.request({
+ method: 'POST',
+ url: 'https://www.overleaf.com/api/project/new/upload'
})
- proc.on('error', () => {
- resolve({ loggedIn: false, email: '' })
+ req.setHeader('Cookie', overleafSessionCookie)
+ req.setHeader('Content-Type', `multipart/form-data; boundary=${boundary}`)
+ req.setHeader('User-Agent', 'Mozilla/5.0')
+ if (overleafCsrfToken) req.setHeader('x-csrf-token', overleafCsrfToken)
+
+ let resBody = ''
+ req.on('response', (res) => {
+ res.on('data', (chunk) => { resBody += chunk.toString() })
+ res.on('end', () => {
+ try {
+ const data = JSON.parse(resBody) as { success?: boolean; project_id?: string }
+ if (data.success !== false && data.project_id) {
+ resolve({ success: true, projectId: data.project_id })
+ } else {
+ resolve({ success: false, message: 'Upload failed' })
+ }
+ } catch {
+ resolve({ success: false, message: 'Invalid response' })
+ }
+ })
})
+ req.on('error', (e) => resolve({ success: false, message: String(e) }))
+ req.write(body)
+ req.end()
})
})
-// Remove credentials from Keychain
-ipcMain.handle('overleaf:logout', async () => {
- return new Promise<void>((resolve) => {
- const proc = spawn('git', ['credential-osxkeychain', 'erase'])
- proc.stdin?.write(`protocol=https\nhost=git.overleaf.com\n\n`)
- proc.stdin?.end()
- proc.on('close', () => resolve())
+// ── File Operations via Overleaf REST API ──────────────────────
+
+ipcMain.handle('overleaf:renameEntity', async (_e, projectId: string, entityType: string, entityId: string, newName: string) => {
+ if (!overleafSessionCookie) return { success: false, message: 'not_logged_in' }
+ const result = await overleafFetch(`/project/${projectId}/${entityType}/${entityId}/rename`, {
+ method: 'POST',
+ body: JSON.stringify({ name: newName })
})
+ return { success: result.ok, message: result.ok ? '' : `HTTP ${result.status}` }
})
-// Git operations for existing repos — use osxkeychain
-ipcMain.handle('git:pull', async (_e, cwd: string) => {
- return gitSpawn(['pull'], cwd)
+ipcMain.handle('overleaf:deleteEntity', async (_e, projectId: string, entityType: string, entityId: string) => {
+ if (!overleafSessionCookie) return { success: false, message: 'not_logged_in' }
+ const result = await overleafFetch(`/project/${projectId}/${entityType}/${entityId}`, {
+ method: 'DELETE'
+ })
+ return { success: result.ok, message: result.ok ? '' : `HTTP ${result.status}` }
})
-ipcMain.handle('git:push', async (_e, cwd: string) => {
- const add = await gitSpawn(['add', '-A'], cwd)
- if (!add.success) return add
- await gitSpawn(['commit', '-m', `Sync from ClaudeTeX ${new Date().toISOString()}`], cwd)
- return gitSpawn(['push'], cwd)
+ipcMain.handle('overleaf:createDoc', async (_e, projectId: string, parentFolderId: string, name: string) => {
+ if (!overleafSessionCookie) return { success: false, message: 'not_logged_in' }
+ const result = await overleafFetch(`/project/${projectId}/doc`, {
+ method: 'POST',
+ body: JSON.stringify({ name, parent_folder_id: parentFolderId })
+ })
+ return { success: result.ok, data: result.data, message: result.ok ? '' : `HTTP ${result.status}` }
+})
+
+ipcMain.handle('overleaf:createFolder', async (_e, projectId: string, parentFolderId: string, name: string) => {
+ if (!overleafSessionCookie) return { success: false, message: 'not_logged_in' }
+ const result = await overleafFetch(`/project/${projectId}/folder`, {
+ method: 'POST',
+ body: JSON.stringify({ name, parent_folder_id: parentFolderId })
+ })
+ return { success: result.ok, data: result.data, message: result.ok ? '' : `HTTP ${result.status}` }
})
-ipcMain.handle('git:status', async (_e, cwd: string) => {
- const result = await gitSpawn(['status', '--porcelain'], cwd)
- return { isGit: result.success, status: result.message }
+// Fetch comment ranges from ALL docs (for ReviewPanel)
+ipcMain.handle('ot:fetchAllCommentContexts', async () => {
+ if (!overleafSock?.projectData) return { success: false }
+
+ const { docPathMap } = walkRootFolder(overleafSock.projectData.project.rootFolder)
+ const contexts: Record<string, { file: string; text: string; pos: number }> = {}
+
+ for (const [docId, relPath] of Object.entries(docPathMap)) {
+ try {
+ const alreadyJoined = docEventHandlers.has(docId)
+ const result = await overleafSock.joinDoc(docId)
+ if (result.ranges?.comments) {
+ for (const c of result.ranges.comments) {
+ if (c.op?.t) {
+ contexts[c.op.t] = { file: relPath, text: c.op.c || '', pos: c.op.p || 0 }
+ }
+ }
+ }
+ if (!alreadyJoined) {
+ await overleafSock.leaveDoc(docId)
+ }
+ } catch (e) {
+ console.log(`[fetchCommentContexts] failed for ${relPath}:`, e)
+ }
+ }
+
+ return { success: true, contexts }
})
-// ── Shell: open external ─────────────────────────────────────────
+ipcMain.handle('overleaf:socketCompile', async (_e, mainTexRelPath: string) => {
+ if (!compilationManager || !overleafSock?.projectData) {
+ return { success: false, log: 'No compilation manager or not connected', pdfPath: '' }
+ }
+
+ const { docPathMap, fileRefs } = walkRootFolder(overleafSock.projectData.project.rootFolder)
+
+ // Bridge already keeps all docs synced to disk. Sync content to compilation manager.
+ if (fileSyncBridge) {
+ for (const [docId, relPath] of Object.entries(docPathMap)) {
+ const content = fileSyncBridge.getDocContent(relPath)
+ if (content !== undefined) {
+ compilationManager.setDocContent(relPath, content)
+ }
+ }
+ } else {
+ // Fallback: fetch docs from socket if bridge isn't available
+ const allDocIds = Object.keys(docPathMap)
+ for (const docId of allDocIds) {
+ const relPath = docPathMap[docId]
+ if (docEventHandlers.has(docId) && compilationManager.hasDoc(relPath)) continue
+ try {
+ const alreadyJoined = docEventHandlers.has(docId)
+ const result = await overleafSock.joinDoc(docId)
+ const content = (result.docLines || []).join('\n')
+ compilationManager.setDocContent(relPath, content)
+ if (!alreadyJoined) {
+ await overleafSock.leaveDoc(docId)
+ }
+ } catch (e) {
+ console.log(`[socketCompile] failed to fetch doc ${relPath}:`, e)
+ }
+ }
+ }
+
+ // Download all binary files (images, .bst, etc.)
+ await compilationManager.syncBinaries(fileRefs)
+
+ return compilationManager.compile(mainTexRelPath, (data) => {
+ mainWindow?.webContents.send('latex:log', data)
+ })
+})
+
+// ── Shell: open external ─────────────────────────────────────────
ipcMain.handle('shell:openExternal', async (_e, url: string) => {
await shell.openExternal(url)
@@ -559,12 +968,18 @@ ipcMain.handle('shell:showInFinder', async (_e, path: string) => {
// ── App Lifecycle ────────────────────────────────────────────────
-app.whenReady().then(createWindow)
+app.whenReady().then(async () => {
+ createWindow()
+ sessionLoadPromise = loadOverleafSession()
+
+})
app.on('window-all-closed', () => {
  ptyInstance?.kill()
-  fileWatcher?.close()
-  compileProcess?.kill()
+  // Stop Overleaf sync and release socket/compilation resources before quitting.
+  fileSyncBridge?.stop()
+  fileSyncBridge = null
+  overleafSock?.disconnect()
+  compilationManager?.cleanup()
  app.quit()
})
diff --git a/src/main/otClient.ts b/src/main/otClient.ts
new file mode 100644
index 0000000..7985c66
--- /dev/null
+++ b/src/main/otClient.ts
@@ -0,0 +1,131 @@
+// OT state machine for main process (mirror of renderer otClient)
+import type { OtOp } from './otTypes'
+import { transformOps } from './otTransform'
+
+/** Submits local ops to the server, based on the given document version. */
+export type SendFn = (ops: OtOp[], version: number) => void
+/** Applies (already transformed) remote ops to the local document. */
+export type ApplyFn = (ops: OtOp[]) => void
+
+/** Internal record for the three-phase OT state machine (see OtClient). */
+interface OtState {
+  name: 'synchronized' | 'awaitingConfirm' | 'awaitingWithBuffer'
+  inflight: OtOp[] | null   // ops sent to the server, not yet acked
+  buffer: OtOp[] | null     // local ops queued behind the in-flight set
+  version: number           // last server-acknowledged document version
+}
+
+export class OtClient {
+ private state: OtState
+ private sendFn: SendFn
+ private applyFn: ApplyFn
+
+ constructor(version: number, sendFn: SendFn, applyFn: ApplyFn) {
+ this.state = { name: 'synchronized', inflight: null, buffer: null, version }
+ this.sendFn = sendFn
+ this.applyFn = applyFn
+ }
+
+ get version(): number {
+ return this.state.version
+ }
+
+ get stateName(): string {
+ return this.state.name
+ }
+
+ onLocalOps(ops: OtOp[]) {
+ if (ops.length === 0) return
+
+ switch (this.state.name) {
+ case 'synchronized':
+ this.state = {
+ name: 'awaitingConfirm',
+ inflight: ops,
+ buffer: null,
+ version: this.state.version
+ }
+ this.sendFn(ops, this.state.version)
+ break
+
+ case 'awaitingConfirm':
+ this.state = {
+ name: 'awaitingWithBuffer',
+ inflight: this.state.inflight,
+ buffer: ops,
+ version: this.state.version
+ }
+ break
+
+ case 'awaitingWithBuffer':
+ this.state = {
+ ...this.state,
+ buffer: [...(this.state.buffer || []), ...ops]
+ }
+ break
+ }
+ }
+
+ onAck() {
+ switch (this.state.name) {
+ case 'awaitingConfirm':
+ this.state = {
+ name: 'synchronized',
+ inflight: null,
+ buffer: null,
+ version: this.state.version + 1
+ }
+ break
+
+ case 'awaitingWithBuffer': {
+ const bufferOps = this.state.buffer || []
+ this.state = {
+ name: 'awaitingConfirm',
+ inflight: bufferOps,
+ buffer: null,
+ version: this.state.version + 1
+ }
+ this.sendFn(bufferOps, this.state.version)
+ break
+ }
+
+ case 'synchronized':
+ console.warn('[OtClient:main] unexpected ack in synchronized state')
+ break
+ }
+ }
+
+ onRemoteOps(ops: OtOp[], newVersion: number) {
+ switch (this.state.name) {
+ case 'synchronized':
+ this.state = { ...this.state, version: newVersion }
+ this.applyFn(ops)
+ break
+
+ case 'awaitingConfirm': {
+ const { left: transformedRemote, right: transformedInflight } = transformOps(ops, this.state.inflight || [])
+ this.state = {
+ ...this.state,
+ inflight: transformedInflight,
+ version: newVersion
+ }
+ this.applyFn(transformedRemote)
+ break
+ }
+
+ case 'awaitingWithBuffer': {
+ const { left: remoteAfterInflight, right: inflightAfterRemote } = transformOps(ops, this.state.inflight || [])
+ const { left: remoteAfterBuffer, right: bufferAfterRemote } = transformOps(remoteAfterInflight, this.state.buffer || [])
+ this.state = {
+ ...this.state,
+ inflight: inflightAfterRemote,
+ buffer: bufferAfterRemote,
+ version: newVersion
+ }
+ this.applyFn(remoteAfterBuffer)
+ break
+ }
+ }
+ }
+
+ reset(version: number) {
+ this.state = { name: 'synchronized', inflight: null, buffer: null, version }
+ }
+}
diff --git a/src/main/otTransform.ts b/src/main/otTransform.ts
new file mode 100644
index 0000000..0d05450
--- /dev/null
+++ b/src/main/otTransform.ts
@@ -0,0 +1,117 @@
+// OT transform functions for main process (mirror of renderer transform)
+import type { OtOp } from './otTypes'
+import { isInsert, isDelete, isComment } from './otTypes'
+
+export function transformOps(
+ ops1: OtOp[],
+ ops2: OtOp[]
+): { left: OtOp[]; right: OtOp[] } {
+ let right = ops2
+
+ const newLeft: OtOp[] = []
+ for (const op1 of ops1) {
+ let transformed = op1
+ const newRight: OtOp[] = []
+ for (const op2 of right) {
+ const { left: tl, right: tr } = transformOp(transformed, op2)
+ transformed = tl
+ newRight.push(tr)
+ }
+ newLeft.push(transformed)
+ right = newRight
+ }
+
+ return { left: newLeft, right }
+}
+
+function transformOp(op1: OtOp, op2: OtOp): { left: OtOp; right: OtOp } {
+ if (isInsert(op1) && isInsert(op2)) {
+ if (op1.p <= op2.p) {
+ return { left: op1, right: { ...op2, p: op2.p + op1.i.length } }
+ } else {
+ return { left: { ...op1, p: op1.p + op2.i.length }, right: op2 }
+ }
+ }
+
+ if (isInsert(op1) && isDelete(op2)) {
+ if (op1.p <= op2.p) {
+ return { left: op1, right: { ...op2, p: op2.p + op1.i.length } }
+ } else if (op1.p >= op2.p + op2.d.length) {
+ return { left: { ...op1, p: op1.p - op2.d.length }, right: op2 }
+ } else {
+ return { left: { ...op1, p: op2.p }, right: op2 }
+ }
+ }
+
+ if (isDelete(op1) && isInsert(op2)) {
+ if (op2.p <= op1.p) {
+ return { left: { ...op1, p: op1.p + op2.i.length }, right: op2 }
+ } else if (op2.p >= op1.p + op1.d.length) {
+ return { left: op1, right: { ...op2, p: op2.p - op1.d.length } }
+ } else {
+ return { left: op1, right: { ...op2, p: op2.p - op1.d.length } }
+ }
+ }
+
+ if (isDelete(op1) && isDelete(op2)) {
+ if (op1.p >= op2.p + op2.d.length) {
+ return {
+ left: { ...op1, p: op1.p - op2.d.length },
+ right: { ...op2, p: op2.p }
+ }
+ } else if (op2.p >= op1.p + op1.d.length) {
+ return {
+ left: op1,
+ right: { ...op2, p: op2.p - op1.d.length }
+ }
+ } else {
+ const overlapStart = Math.max(0, op2.p - op1.p)
+ const overlapEnd = Math.min(op1.d.length, op2.p + op2.d.length - op1.p)
+ let newOp1Text = op1.d
+ if (overlapEnd > overlapStart) {
+ newOp1Text = op1.d.slice(0, overlapStart) + op1.d.slice(overlapEnd)
+ }
+
+ const overlapStart2 = Math.max(0, op1.p - op2.p)
+ const overlapEnd2 = Math.min(op2.d.length, op1.p + op1.d.length - op2.p)
+ let newOp2Text = op2.d
+ if (overlapEnd2 > overlapStart2) {
+ newOp2Text = op2.d.slice(0, overlapStart2) + op2.d.slice(overlapEnd2)
+ }
+
+ const newP1 = op1.p <= op2.p ? op1.p : op1.p - (overlapEnd2 - overlapStart2)
+ const newP2 = op2.p <= op1.p ? op2.p : op2.p - (overlapEnd - overlapStart)
+
+ return {
+ left: newOp1Text ? { d: newOp1Text, p: Math.max(0, newP1) } : { d: '', p: 0 },
+ right: newOp2Text ? { d: newOp2Text, p: Math.max(0, newP2) } : { d: '', p: 0 }
+ }
+ }
+ }
+
+ if (isComment(op1) || isComment(op2)) {
+ if (isComment(op1)) {
+ if (isInsert(op2) && op2.p <= op1.p) {
+ return { left: { ...op1, p: op1.p + op2.i.length }, right: op2 }
+ }
+ if (isDelete(op2) && op2.p < op1.p) {
+ const shift = Math.min(op2.d.length, op1.p - op2.p)
+ return { left: { ...op1, p: op1.p - shift }, right: op2 }
+ }
+ }
+
+ if (isComment(op2)) {
+ if (isInsert(op1) && op1.p <= op2.p) {
+ return { left: op1, right: { ...op2, p: op2.p + op1.i.length } }
+ }
+ if (isDelete(op1) && op1.p < op2.p) {
+ const shift = Math.min(op1.d.length, op2.p - op1.p)
+ return { left: op1, right: { ...op2, p: op2.p - shift } }
+ }
+ }
+
+ return { left: op1, right: op2 }
+ }
+
+ return { left: op1, right: op2 }
+}
diff --git a/src/main/otTypes.ts b/src/main/otTypes.ts
new file mode 100644
index 0000000..8e9df9f
--- /dev/null
+++ b/src/main/otTypes.ts
@@ -0,0 +1,31 @@
+// OT type definitions for main process (mirror of renderer types)
+
+/** Insertion of string `i` at character offset `p`. */
+export interface InsertOp {
+  i: string
+  p: number
+}
+
+/** Deletion of string `d` (the exact removed text) at character offset `p`. */
+export interface DeleteOp {
+  d: string
+  p: number
+}
+
+/** Comment range marker: quoted text `c` at offset `p`; `t` is an id
+    (presumably the comment thread id — TODO confirm against the server). */
+export interface CommentOp {
+  c: string
+  p: number
+  t: string
+}
+
+/** Any Overleaf OT operation. */
+export type OtOp = InsertOp | DeleteOp | CommentOp
+
+/** Type guard: op is an insert (has an `i` field). */
+export function isInsert(op: OtOp): op is InsertOp {
+  return 'i' in op
+}
+
+/** Type guard: op is a delete (has a `d` field). */
+export function isDelete(op: OtOp): op is DeleteOp {
+  return 'd' in op
+}
+
+/** Type guard: op is a comment (has a `c` field). */
+export function isComment(op: OtOp): op is CommentOp {
+  return 'c' in op
+}
diff --git a/src/main/overleafProtocol.ts b/src/main/overleafProtocol.ts
new file mode 100644
index 0000000..49b06d7
--- /dev/null
+++ b/src/main/overleafProtocol.ts
@@ -0,0 +1,95 @@
+// Socket.IO v0.9 protocol encoding/decoding
+
+export interface ParsedMessage {
+ type: 'disconnect' | 'connect' | 'heartbeat' | 'event' | 'ack' | 'error' | 'noop'
+ id?: number
+ data?: unknown
+ name?: string
+ args?: unknown[]
+}
+
+/**
+ * Parse a Socket.IO v0.9 message frame.
+ *
+ * Frame format:
+ * 0:: disconnect
+ * 1:: connect
+ * 2:: heartbeat
+ * 5:::{"name":"x","args":[...]} event
+ * 5:N+::{"name":"x","args":[...]} event with ack request
+ * 6:::N+[jsonData] ack response
+ * 8:: noop
+ */
+export function parseSocketMessage(raw: string): ParsedMessage | null {
+ if (!raw || raw.length === 0) return null
+
+ const type = raw[0]
+
+ switch (type) {
+ case '0':
+ return { type: 'disconnect' }
+ case '1':
+ return { type: 'connect' }
+ case '2':
+ return { type: 'heartbeat' }
+ case '8':
+ return { type: 'noop' }
+ case '5': {
+ // Event: 5:::{"name":"x","args":[...]} or 5:N+::{"name":"x","args":[...]}
+ const ackMatch = raw.match(/^5:(\d+)\+::(.*)$/s)
+ if (ackMatch) {
+ try {
+ const payload = JSON.parse(ackMatch[2])
+ return {
+ type: 'event',
+ id: parseInt(ackMatch[1]),
+ name: payload.name,
+ args: payload.args || []
+ }
+ } catch {
+ return null
+ }
+ }
+ const evtMatch = raw.match(/^5:::(.*)$/s)
+ if (evtMatch) {
+ try {
+ const payload = JSON.parse(evtMatch[1])
+ return { type: 'event', name: payload.name, args: payload.args || [] }
+ } catch {
+ return null
+ }
+ }
+ return null
+ }
+ case '6': {
+ // Ack: 6:::N+[jsonData]
+ const ackMatch = raw.match(/^6:::(\d+)\+([\s\S]*)/)
+ if (ackMatch) {
+ try {
+ const data = JSON.parse(ackMatch[2])
+ return { type: 'ack', id: parseInt(ackMatch[1]), data }
+ } catch {
+ return { type: 'ack', id: parseInt(ackMatch[1]), data: null }
+ }
+ }
+ return null
+ }
+ default:
+ return null
+ }
+}
+
+/** Encode a Socket.IO v0.9 event (no ack) */
+export function encodeEvent(name: string, args: unknown[]): string {
+ return '5:::' + JSON.stringify({ name, args })
+}
+
+/** Encode a Socket.IO v0.9 event that expects an ack response */
+export function encodeEventWithAck(ackId: number, name: string, args: unknown[]): string {
+ return `5:${ackId}+::` + JSON.stringify({ name, args })
+}
+
+/** Encode a heartbeat response */
+export function encodeHeartbeat(): string {
+ return '2::'
+}
diff --git a/src/main/overleafSocket.ts b/src/main/overleafSocket.ts
new file mode 100644
index 0000000..f825c4c
--- /dev/null
+++ b/src/main/overleafSocket.ts
@@ -0,0 +1,401 @@
+// Persistent Socket.IO v0.9 client for real-time Overleaf collaboration
+import { EventEmitter } from 'events'
+import WebSocket from 'ws'
+import { net } from 'electron'
+import {
+ parseSocketMessage,
+ encodeEvent,
+ encodeEventWithAck,
+ encodeHeartbeat
+} from './overleafProtocol'
+
+/** Payload delivered by the server's `joinProjectResponse` event. */
+export interface JoinProjectResult {
+  publicId: string
+  project: {
+    _id: string
+    name: string
+    rootDoc_id: string
+    rootFolder: RootFolder[]
+    owner: { _id: string; first_name: string; last_name: string; email: string }
+  }
+  permissionsLevel: string
+}
+
+/** Top level of a project's file tree (same shape as SubFolder). */
+export interface RootFolder {
+  _id: string
+  name: string
+  docs: DocRef[]
+  fileRefs: FileRef[]
+  folders: SubFolder[]
+}
+
+/** A nested folder: editable docs, binary file refs, and child folders. */
+export interface SubFolder {
+  _id: string
+  name: string
+  docs: DocRef[]
+  fileRefs: FileRef[]
+  folders: SubFolder[]
+}
+
+/** Reference to an editable (text) document. */
+export interface DocRef {
+  _id: string
+  name: string
+}
+
+/** Reference to a binary/uploaded file (image, .bst, linked file, ...). */
+export interface FileRef {
+  _id: string
+  name: string
+  linkedFileData?: unknown
+  created: string
+}
+
+/** Comment range marker: quoted text `c` at offset `p`; `t` is an id
+    (presumably the comment thread id — TODO confirm). */
+export interface CommentOp {
+  c: string
+  p: number
+  t: string
+}
+
+/** Ack payload from `joinDoc`: content lines, version, pending updates,
+    and tracked ranges (comments / changes). */
+export interface JoinDocResult {
+  docLines: string[]
+  version: number
+  updates: unknown[]
+  ranges: {
+    comments: Array<{ id: string; op: CommentOp }>
+    changes: unknown[]
+  }
+}
+
+/** Connection lifecycle states emitted via the 'connectionState' event. */
+export type ConnectionState = 'disconnected' | 'connecting' | 'connected' | 'reconnecting'
+
+export class OverleafSocket extends EventEmitter {
+ private ws: WebSocket | null = null
+ private cookie: string = ''
+ private projectId: string = ''
+ private sid: string = ''
+ private ackId = 0
+ private ackCallbacks = new Map<number, (data: unknown) => void>()
+ private eventWaiters = new Map<string, (args: unknown[]) => void>()
+ private heartbeatTimer: ReturnType<typeof setInterval> | null = null
+ private reconnectTimer: ReturnType<typeof setTimeout> | null = null
+ private reconnectAttempt = 0
+ private maxReconnectDelay = 30000
+ private joinedDocs = new Set<string>()
+ private _state: ConnectionState = 'disconnected'
+ private _projectData: JoinProjectResult | null = null
+ private shouldReconnect = true
+
+ get state(): ConnectionState {
+ return this._state
+ }
+
+ get projectData(): JoinProjectResult | null {
+ return this._projectData
+ }
+
+ private setState(s: ConnectionState) {
+ this._state = s
+ this.emit('connectionState', s)
+ }
+
+ async connect(projectId: string, cookie: string): Promise<JoinProjectResult> {
+ this.projectId = projectId
+ this.cookie = cookie
+ this.shouldReconnect = true
+ return this.doConnect()
+ }
+
+ private async doConnect(): Promise<JoinProjectResult> {
+ this.setState('connecting')
+
+ // Step 1: HTTP handshake to get SID
+ const hsData = await this.handshake()
+ this.sid = hsData.sid
+
+ // Step 2: Open WebSocket
+ return new Promise((resolve, reject) => {
+ const wsUrl = `wss://www.overleaf.com/socket.io/1/websocket/${this.sid}`
+ this.ws = new WebSocket(wsUrl, {
+ headers: { Cookie: this.cookie }
+ })
+
+ const timeout = setTimeout(() => {
+ reject(new Error('WebSocket connection timeout'))
+ this.ws?.close()
+ }, 30000)
+
+ this.ws.on('open', () => {
+ // Wait for connect message (1::) then joinProject
+ })
+
+ this.ws.on('message', (data: WebSocket.Data) => {
+ const raw = data.toString()
+ this.handleMessage(raw, resolve, reject, timeout)
+ })
+
+ this.ws.on('error', (err) => {
+ clearTimeout(timeout)
+ reject(err)
+ })
+
+ this.ws.on('close', () => {
+ this.stopHeartbeat()
+ if (this._state === 'connected' && this.shouldReconnect) {
+ this.scheduleReconnect()
+ }
+ })
+ })
+ }
+
+ private connectResolveFn: ((result: JoinProjectResult) => void) | null = null
+ private connectRejectFn: ((err: Error) => void) | null = null
+ private connectTimeout: ReturnType<typeof setTimeout> | null = null
+
+ private handleMessage(
+ raw: string,
+ connectResolve?: (result: JoinProjectResult) => void,
+ connectReject?: (err: Error) => void,
+ connectTimeout?: ReturnType<typeof setTimeout>
+ ) {
+ const msg = parseSocketMessage(raw)
+ if (!msg) return
+
+ switch (msg.type) {
+ case 'connect':
+ // Server acknowledged connection, now joinProject
+ this.sendJoinProject(connectResolve, connectReject, connectTimeout)
+ break
+
+ case 'heartbeat':
+ this.ws?.send(encodeHeartbeat())
+ break
+
+ case 'ack':
+ if (msg.id !== undefined) {
+ const cb = this.ackCallbacks.get(msg.id)
+ if (cb) {
+ this.ackCallbacks.delete(msg.id)
+ cb(msg.data)
+ }
+ }
+ break
+
+ case 'event':
+ if (msg.name) {
+ // Check if someone is waiting for this event name
+ const waiter = this.eventWaiters.get(msg.name)
+ if (waiter) {
+ this.eventWaiters.delete(msg.name)
+ waiter(msg.args || [])
+ }
+ // Relay real-time events to listeners
+ this.emit('serverEvent', msg.name, msg.args || [])
+
+ // Handle specific real-time events
+ if (msg.name === 'otUpdateApplied') {
+ this.emit('otAck', msg.args?.[0])
+ } else if (msg.name === 'otUpdateError') {
+ this.emit('otError', msg.args?.[0])
+ }
+ }
+ break
+
+ case 'disconnect':
+ this.ws?.close()
+ break
+ }
+ }
+
+ private sendJoinProject(
+ resolve?: (result: JoinProjectResult) => void,
+ reject?: (err: Error) => void,
+ timeout?: ReturnType<typeof setTimeout>
+ ) {
+ // joinProject uses a named event, response comes as joinProjectResponse event
+ const jpPromise = this.waitForEvent('joinProjectResponse')
+
+ this.ws?.send(encodeEvent('joinProject', [{ project_id: this.projectId }]))
+
+ jpPromise.then((args) => {
+ if (timeout) clearTimeout(timeout)
+
+ // Find the project data in the response args
+ let projectResult: JoinProjectResult | null = null
+ for (const arg of args) {
+ if (arg && typeof arg === 'object' && 'project' in (arg as object)) {
+ projectResult = arg as JoinProjectResult
+ break
+ }
+ }
+
+ if (!projectResult) {
+ reject?.(new Error('joinProject: no project data in response'))
+ return
+ }
+
+ this._projectData = projectResult
+ this.setState('connected')
+ this.reconnectAttempt = 0
+ this.startHeartbeat()
+ resolve?.(projectResult)
+ }).catch((err) => {
+ if (timeout) clearTimeout(timeout)
+ reject?.(err)
+ })
+ }
+
+ async joinDoc(docId: string): Promise<JoinDocResult> {
+ const result = await this.emitWithAck('joinDoc', [docId, { encodeRanges: true }]) as unknown[]
+ this.joinedDocs.add(docId)
+
+ // Ack response format: [error, docLines, version, updates, ranges, pathname]
+ // First element is error (null = success)
+ const err = result[0]
+ if (err) throw new Error(`joinDoc failed: ${JSON.stringify(err)}`)
+
+ const docLines = (result[1] as string[]) || []
+ const version = (result[2] as number) || 0
+ const updates = (result[3] as unknown[]) || []
+ const ranges = (result[4] || { comments: [], changes: [] }) as JoinDocResult['ranges']
+
+ return { docLines, version, updates, ranges }
+ }
+
+ async leaveDoc(docId: string): Promise<void> {
+ await this.emitWithAck('leaveDoc', [docId])
+ this.joinedDocs.delete(docId)
+ }
+
+ async applyOtUpdate(docId: string, ops: unknown[], version: number, hash: string): Promise<void> {
+ // Fire-and-forget: server responds with otUpdateApplied or otUpdateError event
+ this.ws?.send(encodeEvent('applyOtUpdate', [docId, { doc: docId, op: ops, v: version, hash, lastV: version }]))
+ }
+
+ disconnect() {
+ this.shouldReconnect = false
+ this.stopHeartbeat()
+ if (this.reconnectTimer) {
+ clearTimeout(this.reconnectTimer)
+ this.reconnectTimer = null
+ }
+ this.joinedDocs.clear()
+ this.ackCallbacks.clear()
+ this.eventWaiters.clear()
+ this.ws?.close()
+ this.ws = null
+ this._projectData = null
+ this.setState('disconnected')
+ }
+
+ private async handshake(): Promise<{ sid: string; setCookies: string[] }> {
+ return new Promise((resolve, reject) => {
+ const url = `https://www.overleaf.com/socket.io/1/?t=${Date.now()}&projectId=${this.projectId}`
+ const req = net.request(url)
+ req.setHeader('Cookie', this.cookie)
+ req.setHeader('User-Agent', 'Mozilla/5.0')
+
+ let body = ''
+ const setCookies: string[] = []
+
+ req.on('response', (res) => {
+ const rawHeaders = res.headers['set-cookie']
+ if (rawHeaders) {
+ if (Array.isArray(rawHeaders)) {
+ setCookies.push(...rawHeaders)
+ } else {
+ setCookies.push(rawHeaders)
+ }
+ }
+ res.on('data', (chunk) => { body += chunk.toString() })
+ res.on('end', () => {
+ const sid = body.split(':')[0]
+ if (!sid) {
+ reject(new Error('handshake: no SID in response'))
+ return
+ }
+ // Merge GCLB cookies into our cookie string
+ for (const sc of setCookies) {
+ const part = sc.split(';')[0]
+ if (part && !this.cookie.includes(part)) {
+ this.cookie += '; ' + part
+ }
+ }
+ resolve({ sid, setCookies })
+ })
+ })
+ req.on('error', reject)
+ req.end()
+ })
+ }
+
+ private emitWithAck(name: string, args: unknown[]): Promise<unknown> {
+ return new Promise((resolve, reject) => {
+ if (!this.ws || this.ws.readyState !== WebSocket.OPEN) {
+ reject(new Error('WebSocket not connected'))
+ return
+ }
+ this.ackId++
+ const id = this.ackId
+ const timer = setTimeout(() => {
+ this.ackCallbacks.delete(id)
+ reject(new Error(`ack timeout for ${name}`))
+ }, 30000)
+
+ this.ackCallbacks.set(id, (data) => {
+ clearTimeout(timer)
+ resolve(data)
+ })
+
+ this.ws.send(encodeEventWithAck(id, name, args))
+ })
+ }
+
+ private waitForEvent(name: string): Promise<unknown[]> {
+ return new Promise((resolve) => {
+ this.eventWaiters.set(name, resolve)
+ })
+ }
+
+ private startHeartbeat() {
+ this.stopHeartbeat()
+ this.heartbeatTimer = setInterval(() => {
+ if (this.ws?.readyState === WebSocket.OPEN) {
+ this.ws.send(encodeHeartbeat())
+ }
+ }, 25000)
+ }
+
+ private stopHeartbeat() {
+ if (this.heartbeatTimer) {
+ clearInterval(this.heartbeatTimer)
+ this.heartbeatTimer = null
+ }
+ }
+
+ private scheduleReconnect() {
+ this.setState('reconnecting')
+ const delay = Math.min(1000 * Math.pow(2, this.reconnectAttempt), this.maxReconnectDelay)
+ this.reconnectAttempt++
+
+ console.log(`[OverleafSocket] reconnecting in ${delay}ms (attempt ${this.reconnectAttempt})`)
+
+ this.reconnectTimer = setTimeout(async () => {
+ try {
+ await this.doConnect()
+ // Re-join docs
+ for (const docId of this.joinedDocs) {
+ try {
+ const result = await this.joinDoc(docId)
+ this.emit('docRejoined', docId, result)
+ } catch (e) {
+ console.log(`[OverleafSocket] failed to rejoin doc ${docId}:`, e)
+ }
+ }
+ } catch (e) {
+ console.log('[OverleafSocket] reconnect failed:', e)
+ if (this.shouldReconnect) {
+ this.scheduleReconnect()
+ }
+ }
+ }, delay)
+ }
+}