diff --git a/eslint.config.js b/eslint.config.js index 742f0c2b..6419a9fd 100644 --- a/eslint.config.js +++ b/eslint.config.js @@ -165,9 +165,8 @@ export default tseslint.config( pattern: [ "server/projects.js", "server/sessionManager.js", - "server/database/*.{js,ts}", "server/utils/runtime-paths.js", - ], // provider history loading still resolves session data through these legacy runtime/database files + ], // provider history loading still resolves session data through these legacy runtime files mode: "file", }, { diff --git a/package-lock.json b/package-lock.json index 2bdf35ab..c22e33c2 100644 --- a/package-lock.json +++ b/package-lock.json @@ -25,6 +25,7 @@ "@replit/codemirror-minimap": "^0.5.2", "@tailwindcss/typography": "^0.5.16", "@uiw/react-codemirror": "^4.23.13", + "@vscode/ripgrep": "^1.17.1", "@xterm/addon-clipboard": "^0.1.0", "@xterm/addon-fit": "^0.10.0", "@xterm/addon-web-links": "^0.11.0", @@ -80,6 +81,7 @@ "@types/node": "^22.19.7", "@types/react": "^18.2.43", "@types/react-dom": "^18.2.17", + "@types/ws": "^8.18.1", "@vitejs/plugin-react": "^4.6.0", "auto-changelog": "^2.5.0", "autoprefixer": "^10.4.16", @@ -4142,6 +4144,16 @@ "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", "license": "MIT" }, + "node_modules/@types/ws": { + "version": "8.18.1", + "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.18.1.tgz", + "integrity": "sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@typescript-eslint/eslint-plugin": { "version": "8.56.1", "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.56.1.tgz", @@ -4786,6 +4798,18 @@ "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" } }, + "node_modules/@vscode/ripgrep": { + "version": "1.17.1", + "resolved": "https://registry.npmjs.org/@vscode/ripgrep/-/ripgrep-1.17.1.tgz", + "integrity": "sha512-xTs7DGyAO3IsJYOCTBP8LnTvPiYVKEuyv8s0xyJDBXfs8rhBfqnZPvb6xDT+RnwWzcXqW27xLS/aGrkjX7lNWw==", + "hasInstallScript": true, + "license": "MIT", + "dependencies": { + "https-proxy-agent": "^7.0.2", + "proxy-from-env": "^1.1.0", + "yauzl": "^2.9.2" + } + }, "node_modules/@xterm/addon-clipboard": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/@xterm/addon-clipboard/-/addon-clipboard-0.1.0.tgz", @@ -5618,6 +5642,15 @@ "ieee754": "^1.1.13" } }, + "node_modules/buffer-crc32": { + "version": "0.2.13", + "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", + "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==", + "license": "MIT", + "engines": { + "node": "*" + } + }, "node_modules/buffer-equal-constant-time": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", @@ -8277,6 +8310,15 @@ "walk-up-path": "^4.0.0" } }, + "node_modules/fd-slicer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", + "integrity": "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==", + "license": "MIT", + "dependencies": { + "pend": "~1.2.0" + } + }, "node_modules/file-entry-cache": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", @@ -13381,6 +13423,12 @@ "dev": true, "license": "MIT" }, + 
"node_modules/pend": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", + "integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==", + "license": "MIT" + }, "node_modules/perfect-debounce": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/perfect-debounce/-/perfect-debounce-2.0.0.tgz", @@ -13774,7 +13822,6 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", - "dev": true, "license": "MIT" }, "node_modules/pump": { @@ -18225,6 +18272,16 @@ "node": ">=8" } }, + "node_modules/yauzl": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", + "integrity": "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==", + "license": "MIT", + "dependencies": { + "buffer-crc32": "~0.2.3", + "fd-slicer": "~1.1.0" + } + }, "node_modules/yocto-queue": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", diff --git a/package.json b/package.json index 3388f7a4..938391cb 100644 --- a/package.json +++ b/package.json @@ -80,6 +80,7 @@ "@replit/codemirror-minimap": "^0.5.2", "@tailwindcss/typography": "^0.5.16", "@uiw/react-codemirror": "^4.23.13", + "@vscode/ripgrep": "^1.17.1", "@xterm/addon-clipboard": "^0.1.0", "@xterm/addon-fit": "^0.10.0", "@xterm/addon-web-links": "^0.11.0", @@ -132,6 +133,7 @@ "@types/node": "^22.19.7", "@types/react": "^18.2.43", "@types/react-dom": "^18.2.17", + "@types/ws": "^8.18.1", "@vitejs/plugin-react": "^4.6.0", "auto-changelog": "^2.5.0", "autoprefixer": "^10.4.16", diff --git a/server/database/db.js b/server/database/db.js deleted file mode 100644 index f43c894a..00000000 --- a/server/database/db.js +++ /dev/null @@ -1,593 +0,0 @@ -import Database from 'better-sqlite3'; -import path from 'path'; -import fs from 'fs'; -import crypto from 'crypto'; -import { findAppRoot, getModuleDir } from '../utils/runtime-paths.js'; -import { - APP_CONFIG_TABLE_SQL, - USER_NOTIFICATION_PREFERENCES_TABLE_SQL, - VAPID_KEYS_TABLE_SQL, - PUSH_SUBSCRIPTIONS_TABLE_SQL, - SESSION_NAMES_TABLE_SQL, - SESSION_NAMES_LOOKUP_INDEX_SQL, - DATABASE_SCHEMA_SQL -} from './schema.js'; - -const __dirname = getModuleDir(import.meta.url); -// The compiled backend lives under dist-server/server/database, but the install root we log -// should still point at the project/app root. Resolving it here avoids build-layout drift. 
-const APP_ROOT = findAppRoot(__dirname);
-
-// ANSI color codes for terminal output
-const colors = {
-  reset: '\x1b[0m',
-  bright: '\x1b[1m',
-  cyan: '\x1b[36m',
-  dim: '\x1b[2m',
-};
-
-const c = {
-  info: (text) => `${colors.cyan}${text}${colors.reset}`,
-  bright: (text) => `${colors.bright}${text}${colors.reset}`,
-  dim: (text) => `${colors.dim}${text}${colors.reset}`,
-};
-
-// Use DATABASE_PATH environment variable if set, otherwise use default location
-const DB_PATH = process.env.DATABASE_PATH || path.join(__dirname, 'auth.db');
-
-// Ensure database directory exists if custom path is provided
-if (process.env.DATABASE_PATH) {
-  const dbDir = path.dirname(DB_PATH);
-  try {
-    if (!fs.existsSync(dbDir)) {
-      fs.mkdirSync(dbDir, { recursive: true });
-      console.log(`Created database directory: ${dbDir}`);
-    }
-  } catch (error) {
-    console.error(`Failed to create database directory ${dbDir}:`, error.message);
-    throw error;
-  }
-}
-
-// As part of 1.19.2 we are introducing a new location for auth.db. The code below handles moving an existing legacy database from the install directory to the new location.
-const LEGACY_DB_PATH = path.join(__dirname, 'auth.db');
-if (DB_PATH !== LEGACY_DB_PATH && !fs.existsSync(DB_PATH) && fs.existsSync(LEGACY_DB_PATH)) {
-  try {
-    fs.copyFileSync(LEGACY_DB_PATH, DB_PATH);
-    console.log(`[MIGRATION] Copied database from ${LEGACY_DB_PATH} to ${DB_PATH}`);
-    for (const suffix of ['-wal', '-shm']) {
-      if (fs.existsSync(LEGACY_DB_PATH + suffix)) {
-        fs.copyFileSync(LEGACY_DB_PATH + suffix, DB_PATH + suffix);
-      }
-    }
-  } catch (err) {
-    console.warn(`[MIGRATION] Could not copy legacy database: ${err.message}`);
-  }
-}
-
-// Create database connection
-const db = new Database(DB_PATH);
-
-// app_config must exist before any other module imports (auth.js reads the JWT secret at load time).
-// runMigrations() also creates this table, but it runs too late for existing installations
-// where auth.js is imported before initializeDatabase() is called.
-db.exec(APP_CONFIG_TABLE_SQL); - -// Show app installation path prominently -const appInstallPath = APP_ROOT; -console.log(''); -console.log(c.dim('═'.repeat(60))); -console.log(`${c.info('[INFO]')} App Installation: ${c.bright(appInstallPath)}`); -console.log(`${c.info('[INFO]')} Database: ${c.dim(path.relative(appInstallPath, DB_PATH))}`); -if (process.env.DATABASE_PATH) { - console.log(` ${c.dim('(Using custom DATABASE_PATH from environment)')}`); -} -console.log(c.dim('═'.repeat(60))); -console.log(''); - -const runMigrations = () => { - try { - const tableInfo = db.prepare("PRAGMA table_info(users)").all(); - const columnNames = tableInfo.map(col => col.name); - - if (!columnNames.includes('git_name')) { - console.log('Running migration: Adding git_name column'); - db.exec('ALTER TABLE users ADD COLUMN git_name TEXT'); - } - - if (!columnNames.includes('git_email')) { - console.log('Running migration: Adding git_email column'); - db.exec('ALTER TABLE users ADD COLUMN git_email TEXT'); - } - - if (!columnNames.includes('has_completed_onboarding')) { - console.log('Running migration: Adding has_completed_onboarding column'); - db.exec('ALTER TABLE users ADD COLUMN has_completed_onboarding BOOLEAN DEFAULT 0'); - } - - db.exec(USER_NOTIFICATION_PREFERENCES_TABLE_SQL); - db.exec(VAPID_KEYS_TABLE_SQL); - db.exec(PUSH_SUBSCRIPTIONS_TABLE_SQL); - db.exec(APP_CONFIG_TABLE_SQL); - db.exec(SESSION_NAMES_TABLE_SQL); - db.exec(SESSION_NAMES_LOOKUP_INDEX_SQL); - - console.log('Database migrations completed successfully'); - } catch (error) { - console.error('Error running migrations:', error.message); - throw error; - } -}; - -// Initialize database with schema -const initializeDatabase = async () => { - try { - db.exec(DATABASE_SCHEMA_SQL); - console.log('Database initialized successfully'); - runMigrations(); - } catch (error) { - console.error('Error initializing database:', error.message); - throw error; - } -}; - -// User database operations -const userDb = { - // Check if any users exist - hasUsers: () => { - try { - const row = db.prepare('SELECT COUNT(*) as count FROM users').get(); - return row.count > 0; - } catch (err) { - throw err; - } - }, - - // Create a new user - createUser: (username, passwordHash) => { - try { - const stmt = db.prepare('INSERT INTO users (username, password_hash) VALUES (?, ?)'); - const result = stmt.run(username, passwordHash); - return { id: result.lastInsertRowid, username }; - } catch (err) { - throw err; - } - }, - - // Get user by username - getUserByUsername: (username) => { - try { - const row = db.prepare('SELECT * FROM users WHERE username = ? AND is_active = 1').get(username); - return row; - } catch (err) { - throw err; - } - }, - - // Update last login time (non-fatal — logged but not thrown) - updateLastLogin: (userId) => { - try { - db.prepare('UPDATE users SET last_login = CURRENT_TIMESTAMP WHERE id = ?').run(userId); - } catch (err) { - console.warn('Failed to update last login:', err.message); - } - }, - - // Get user by ID - getUserById: (userId) => { - try { - const row = db.prepare('SELECT id, username, created_at, last_login FROM users WHERE id = ? 
AND is_active = 1').get(userId); - return row; - } catch (err) { - throw err; - } - }, - - getFirstUser: () => { - try { - const row = db.prepare('SELECT id, username, created_at, last_login FROM users WHERE is_active = 1 LIMIT 1').get(); - return row; - } catch (err) { - throw err; - } - }, - - updateGitConfig: (userId, gitName, gitEmail) => { - try { - const stmt = db.prepare('UPDATE users SET git_name = ?, git_email = ? WHERE id = ?'); - stmt.run(gitName, gitEmail, userId); - } catch (err) { - throw err; - } - }, - - getGitConfig: (userId) => { - try { - const row = db.prepare('SELECT git_name, git_email FROM users WHERE id = ?').get(userId); - return row; - } catch (err) { - throw err; - } - }, - - completeOnboarding: (userId) => { - try { - const stmt = db.prepare('UPDATE users SET has_completed_onboarding = 1 WHERE id = ?'); - stmt.run(userId); - } catch (err) { - throw err; - } - }, - - hasCompletedOnboarding: (userId) => { - try { - const row = db.prepare('SELECT has_completed_onboarding FROM users WHERE id = ?').get(userId); - return row?.has_completed_onboarding === 1; - } catch (err) { - throw err; - } - } -}; - -// API Keys database operations -const apiKeysDb = { - // Generate a new API key - generateApiKey: () => { - return 'ck_' + crypto.randomBytes(32).toString('hex'); - }, - - // Create a new API key - createApiKey: (userId, keyName) => { - try { - const apiKey = apiKeysDb.generateApiKey(); - const stmt = db.prepare('INSERT INTO api_keys (user_id, key_name, api_key) VALUES (?, ?, ?)'); - const result = stmt.run(userId, keyName, apiKey); - return { id: result.lastInsertRowid, keyName, apiKey }; - } catch (err) { - throw err; - } - }, - - // Get all API keys for a user - getApiKeys: (userId) => { - try { - const rows = db.prepare('SELECT id, key_name, api_key, created_at, last_used, is_active FROM api_keys WHERE user_id = ? ORDER BY created_at DESC').all(userId); - return rows; - } catch (err) { - throw err; - } - }, - - // Validate API key and get user - validateApiKey: (apiKey) => { - try { - const row = db.prepare(` - SELECT u.id, u.username, ak.id as api_key_id - FROM api_keys ak - JOIN users u ON ak.user_id = u.id - WHERE ak.api_key = ? AND ak.is_active = 1 AND u.is_active = 1 - `).get(apiKey); - - if (row) { - // Update last_used timestamp - db.prepare('UPDATE api_keys SET last_used = CURRENT_TIMESTAMP WHERE id = ?').run(row.api_key_id); - } - - return row; - } catch (err) { - throw err; - } - }, - - // Delete an API key - deleteApiKey: (userId, apiKeyId) => { - try { - const stmt = db.prepare('DELETE FROM api_keys WHERE id = ? AND user_id = ?'); - const result = stmt.run(apiKeyId, userId); - return result.changes > 0; - } catch (err) { - throw err; - } - }, - - // Toggle API key active status - toggleApiKey: (userId, apiKeyId, isActive) => { - try { - const stmt = db.prepare('UPDATE api_keys SET is_active = ? WHERE id = ? AND user_id = ?'); - const result = stmt.run(isActive ? 1 : 0, apiKeyId, userId); - return result.changes > 0; - } catch (err) { - throw err; - } - } -}; - -// User credentials database operations (for GitHub tokens, GitLab tokens, etc.) 
-const credentialsDb = { - // Create a new credential - createCredential: (userId, credentialName, credentialType, credentialValue, description = null) => { - try { - const stmt = db.prepare('INSERT INTO user_credentials (user_id, credential_name, credential_type, credential_value, description) VALUES (?, ?, ?, ?, ?)'); - const result = stmt.run(userId, credentialName, credentialType, credentialValue, description); - return { id: result.lastInsertRowid, credentialName, credentialType }; - } catch (err) { - throw err; - } - }, - - // Get all credentials for a user, optionally filtered by type - getCredentials: (userId, credentialType = null) => { - try { - let query = 'SELECT id, credential_name, credential_type, description, created_at, is_active FROM user_credentials WHERE user_id = ?'; - const params = [userId]; - - if (credentialType) { - query += ' AND credential_type = ?'; - params.push(credentialType); - } - - query += ' ORDER BY created_at DESC'; - - const rows = db.prepare(query).all(...params); - return rows; - } catch (err) { - throw err; - } - }, - - // Get active credential value for a user by type (returns most recent active) - getActiveCredential: (userId, credentialType) => { - try { - const row = db.prepare('SELECT credential_value FROM user_credentials WHERE user_id = ? AND credential_type = ? AND is_active = 1 ORDER BY created_at DESC LIMIT 1').get(userId, credentialType); - return row?.credential_value || null; - } catch (err) { - throw err; - } - }, - - // Delete a credential - deleteCredential: (userId, credentialId) => { - try { - const stmt = db.prepare('DELETE FROM user_credentials WHERE id = ? AND user_id = ?'); - const result = stmt.run(credentialId, userId); - return result.changes > 0; - } catch (err) { - throw err; - } - }, - - // Toggle credential active status - toggleCredential: (userId, credentialId, isActive) => { - try { - const stmt = db.prepare('UPDATE user_credentials SET is_active = ? WHERE id = ? AND user_id = ?'); - const result = stmt.run(isActive ? 1 : 0, credentialId, userId); - return result.changes > 0; - } catch (err) { - throw err; - } - } -}; - -const DEFAULT_NOTIFICATION_PREFERENCES = { - channels: { - inApp: false, - webPush: false - }, - events: { - actionRequired: true, - stop: true, - error: true - } -}; - -const normalizeNotificationPreferences = (value) => { - const source = value && typeof value === 'object' ? 
value : {}; - - return { - channels: { - inApp: source.channels?.inApp === true, - webPush: source.channels?.webPush === true - }, - events: { - actionRequired: source.events?.actionRequired !== false, - stop: source.events?.stop !== false, - error: source.events?.error !== false - } - }; -}; - -const notificationPreferencesDb = { - getPreferences: (userId) => { - try { - const row = db.prepare('SELECT preferences_json FROM user_notification_preferences WHERE user_id = ?').get(userId); - if (!row) { - const defaults = normalizeNotificationPreferences(DEFAULT_NOTIFICATION_PREFERENCES); - db.prepare( - 'INSERT INTO user_notification_preferences (user_id, preferences_json, updated_at) VALUES (?, ?, CURRENT_TIMESTAMP)' - ).run(userId, JSON.stringify(defaults)); - return defaults; - } - - let parsed; - try { - parsed = JSON.parse(row.preferences_json); - } catch { - parsed = DEFAULT_NOTIFICATION_PREFERENCES; - } - return normalizeNotificationPreferences(parsed); - } catch (err) { - throw err; - } - }, - - updatePreferences: (userId, preferences) => { - try { - const normalized = normalizeNotificationPreferences(preferences); - db.prepare( - `INSERT INTO user_notification_preferences (user_id, preferences_json, updated_at) - VALUES (?, ?, CURRENT_TIMESTAMP) - ON CONFLICT(user_id) DO UPDATE SET - preferences_json = excluded.preferences_json, - updated_at = CURRENT_TIMESTAMP` - ).run(userId, JSON.stringify(normalized)); - return normalized; - } catch (err) { - throw err; - } - } -}; - -const pushSubscriptionsDb = { - saveSubscription: (userId, endpoint, keysP256dh, keysAuth) => { - try { - db.prepare( - `INSERT INTO push_subscriptions (user_id, endpoint, keys_p256dh, keys_auth) - VALUES (?, ?, ?, ?) - ON CONFLICT(endpoint) DO UPDATE SET - user_id = excluded.user_id, - keys_p256dh = excluded.keys_p256dh, - keys_auth = excluded.keys_auth` - ).run(userId, endpoint, keysP256dh, keysAuth); - } catch (err) { - throw err; - } - }, - - getSubscriptions: (userId) => { - try { - return db.prepare('SELECT endpoint, keys_p256dh, keys_auth FROM push_subscriptions WHERE user_id = ?').all(userId); - } catch (err) { - throw err; - } - }, - - removeSubscription: (endpoint) => { - try { - db.prepare('DELETE FROM push_subscriptions WHERE endpoint = ?').run(endpoint); - } catch (err) { - throw err; - } - }, - - removeAllForUser: (userId) => { - try { - db.prepare('DELETE FROM push_subscriptions WHERE user_id = ?').run(userId); - } catch (err) { - throw err; - } - } -}; - -// Session custom names database operations -const sessionNamesDb = { - // Set (insert or update) a custom session name - setName: (sessionId, provider, customName) => { - db.prepare(` - INSERT INTO session_names (session_id, provider, custom_name) - VALUES (?, ?, ?) - ON CONFLICT(session_id, provider) - DO UPDATE SET custom_name = excluded.custom_name, updated_at = CURRENT_TIMESTAMP - `).run(sessionId, provider, customName); - }, - - // Get a single custom session name - getName: (sessionId, provider) => { - const row = db.prepare( - 'SELECT custom_name FROM session_names WHERE session_id = ? AND provider = ?' 
- ).get(sessionId, provider); - return row?.custom_name || null; - }, - - // Batch lookup — returns Map - getNames: (sessionIds, provider) => { - if (!sessionIds.length) return new Map(); - const placeholders = sessionIds.map(() => '?').join(','); - const rows = db.prepare( - `SELECT session_id, custom_name FROM session_names - WHERE session_id IN (${placeholders}) AND provider = ?` - ).all(...sessionIds, provider); - return new Map(rows.map(r => [r.session_id, r.custom_name])); - }, - - // Delete a custom session name - deleteName: (sessionId, provider) => { - return db.prepare( - 'DELETE FROM session_names WHERE session_id = ? AND provider = ?' - ).run(sessionId, provider).changes > 0; - }, -}; - -// Apply custom session names from the database (overrides CLI-generated summaries) -function applyCustomSessionNames(sessions, provider) { - if (!sessions?.length) return; - try { - const ids = sessions.map(s => s.id); - const customNames = sessionNamesDb.getNames(ids, provider); - for (const session of sessions) { - const custom = customNames.get(session.id); - if (custom) session.summary = custom; - } - } catch (error) { - console.warn(`[DB] Failed to apply custom session names for ${provider}:`, error.message); - } -} - -// App config database operations -const appConfigDb = { - get: (key) => { - try { - const row = db.prepare('SELECT value FROM app_config WHERE key = ?').get(key); - return row?.value || null; - } catch (err) { - return null; - } - }, - - set: (key, value) => { - db.prepare( - 'INSERT INTO app_config (key, value) VALUES (?, ?) ON CONFLICT(key) DO UPDATE SET value = excluded.value' - ).run(key, value); - }, - - getOrCreateJwtSecret: () => { - let secret = appConfigDb.get('jwt_secret'); - if (!secret) { - secret = crypto.randomBytes(64).toString('hex'); - appConfigDb.set('jwt_secret', secret); - } - return secret; - } -}; - -// Backward compatibility - keep old names pointing to new system -const githubTokensDb = { - createGithubToken: (userId, tokenName, githubToken, description = null) => { - return credentialsDb.createCredential(userId, tokenName, 'github_token', githubToken, description); - }, - getGithubTokens: (userId) => { - return credentialsDb.getCredentials(userId, 'github_token'); - }, - getActiveGithubToken: (userId) => { - return credentialsDb.getActiveCredential(userId, 'github_token'); - }, - deleteGithubToken: (userId, tokenId) => { - return credentialsDb.deleteCredential(userId, tokenId); - }, - toggleGithubToken: (userId, tokenId, isActive) => { - return credentialsDb.toggleCredential(userId, tokenId, isActive); - } -}; - -export { - db, - initializeDatabase, - userDb, - apiKeysDb, - credentialsDb, - notificationPreferencesDb, - pushSubscriptionsDb, - sessionNamesDb, - applyCustomSessionNames, - appConfigDb, - githubTokensDb // Backward compatibility -}; diff --git a/server/database/schema.js b/server/database/schema.js deleted file mode 100644 index 21c1b8eb..00000000 --- a/server/database/schema.js +++ /dev/null @@ -1,102 +0,0 @@ -export const APP_CONFIG_TABLE_SQL = `CREATE TABLE IF NOT EXISTS app_config ( - key TEXT PRIMARY KEY, - value TEXT NOT NULL, - created_at DATETIME DEFAULT CURRENT_TIMESTAMP -);`; - -export const USER_NOTIFICATION_PREFERENCES_TABLE_SQL = `CREATE TABLE IF NOT EXISTS user_notification_preferences ( - user_id INTEGER PRIMARY KEY, - preferences_json TEXT NOT NULL, - updated_at DATETIME DEFAULT CURRENT_TIMESTAMP, - FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE -);`; - -export const VAPID_KEYS_TABLE_SQL = `CREATE TABLE IF 
NOT EXISTS vapid_keys ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - public_key TEXT NOT NULL, - private_key TEXT NOT NULL, - created_at DATETIME DEFAULT CURRENT_TIMESTAMP -);`; - -export const PUSH_SUBSCRIPTIONS_TABLE_SQL = `CREATE TABLE IF NOT EXISTS push_subscriptions ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - user_id INTEGER NOT NULL, - endpoint TEXT NOT NULL UNIQUE, - keys_p256dh TEXT NOT NULL, - keys_auth TEXT NOT NULL, - created_at DATETIME DEFAULT CURRENT_TIMESTAMP, - FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE -);`; - -export const SESSION_NAMES_TABLE_SQL = `CREATE TABLE IF NOT EXISTS session_names ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - session_id TEXT NOT NULL, - provider TEXT NOT NULL DEFAULT 'claude', - custom_name TEXT NOT NULL, - created_at DATETIME DEFAULT CURRENT_TIMESTAMP, - updated_at DATETIME DEFAULT CURRENT_TIMESTAMP, - UNIQUE(session_id, provider) -);`; - -export const SESSION_NAMES_LOOKUP_INDEX_SQL = `CREATE INDEX IF NOT EXISTS idx_session_names_lookup ON session_names(session_id, provider);`; - -export const DATABASE_SCHEMA_SQL = `PRAGMA foreign_keys = ON; - -CREATE TABLE IF NOT EXISTS users ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - username TEXT UNIQUE NOT NULL, - password_hash TEXT NOT NULL, - created_at DATETIME DEFAULT CURRENT_TIMESTAMP, - last_login DATETIME, - is_active BOOLEAN DEFAULT 1, - git_name TEXT, - git_email TEXT, - has_completed_onboarding BOOLEAN DEFAULT 0 -); - -CREATE INDEX IF NOT EXISTS idx_users_username ON users(username); -CREATE INDEX IF NOT EXISTS idx_users_active ON users(is_active); - -CREATE TABLE IF NOT EXISTS api_keys ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - user_id INTEGER NOT NULL, - key_name TEXT NOT NULL, - api_key TEXT UNIQUE NOT NULL, - created_at DATETIME DEFAULT CURRENT_TIMESTAMP, - last_used DATETIME, - is_active BOOLEAN DEFAULT 1, - FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE -); - -CREATE INDEX IF NOT EXISTS idx_api_keys_key ON api_keys(api_key); -CREATE INDEX IF NOT EXISTS idx_api_keys_user_id ON api_keys(user_id); -CREATE INDEX IF NOT EXISTS idx_api_keys_active ON api_keys(is_active); - -CREATE TABLE IF NOT EXISTS user_credentials ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - user_id INTEGER NOT NULL, - credential_name TEXT NOT NULL, - credential_type TEXT NOT NULL, - credential_value TEXT NOT NULL, - description TEXT, - created_at DATETIME DEFAULT CURRENT_TIMESTAMP, - is_active BOOLEAN DEFAULT 1, - FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE -); - -CREATE INDEX IF NOT EXISTS idx_user_credentials_user_id ON user_credentials(user_id); -CREATE INDEX IF NOT EXISTS idx_user_credentials_type ON user_credentials(credential_type); -CREATE INDEX IF NOT EXISTS idx_user_credentials_active ON user_credentials(is_active); - -${USER_NOTIFICATION_PREFERENCES_TABLE_SQL} - -${VAPID_KEYS_TABLE_SQL} - -${PUSH_SUBSCRIPTIONS_TABLE_SQL} - -${SESSION_NAMES_TABLE_SQL} - -${SESSION_NAMES_LOOKUP_INDEX_SQL} - -${APP_CONFIG_TABLE_SQL} -`; diff --git a/server/index.js b/server/index.js index 62d85130..d8223e2a 100755 --- a/server/index.js +++ b/server/index.js @@ -1,39 +1,57 @@ #!/usr/bin/env node // Load environment variables before other imports execute import './load-env.js'; -import fs from 'fs'; +import fs, { promises as fsPromises } from 'fs'; import path from 'path'; -import { findAppRoot, getModuleDir } from './utils/runtime-paths.js'; - -import { AppError, createNormalizedMessage } from '@/shared/utils.js'; - - -const __dirname = getModuleDir(import.meta.url); -// The server source runs 
from /server, while the compiled output runs from /dist-server/server. -// Resolving the app root once keeps every repo-level lookup below aligned across both layouts. -const APP_ROOT = findAppRoot(__dirname); -const installMode = fs.existsSync(path.join(APP_ROOT, '.git')) ? 'git' : 'npm'; - -import { c } from './utils/colors.js'; - -console.log('SERVER_PORT from env:', process.env.SERVER_PORT); - -import express from 'express'; -import { WebSocketServer, WebSocket } from 'ws'; import os from 'os'; import http from 'http'; -import cors from 'cors'; -import { promises as fsPromises } from 'fs'; import { spawn } from 'child_process'; -import pty from 'node-pty'; + +import express from 'express'; +import cors from 'cors'; import mime from 'mime-types'; -import { getProjects, getSessions, renameProject, deleteSession, deleteProject, extractProjectDirectory, clearProjectDirectoryCache, searchConversations } from './projects.js'; -import { queryClaudeSDK, abortClaudeSDKSession, isClaudeSDKSessionActive, getActiveClaudeSDKSessions, resolveToolApproval, getPendingApprovalsForSession, reconnectSessionWriter } from './claude-sdk.js'; -import { spawnCursor, abortCursorSession, isCursorSessionActive, getActiveCursorSessions } from './cursor-cli.js'; -import { queryCodex, abortCodexSession, isCodexSessionActive, getActiveCodexSessions } from './openai-codex.js'; -import { spawnGemini, abortGeminiSession, isGeminiSessionActive, getActiveGeminiSessions } from './gemini-cli.js'; +import { AppError, WORKSPACES_ROOT, validateWorkspacePath } from '@/shared/utils.js'; +import { closeSessionsWatcher, initializeSessionsWatcher } from '@/modules/providers/index.js'; +import { createWebSocketServer } from '@/modules/websocket/index.js'; + +import { getConnectableHost } from '../shared/networkHosts.js'; + +import { findAppRoot, getModuleDir } from './utils/runtime-paths.js'; +import { + queryClaudeSDK, + abortClaudeSDKSession, + isClaudeSDKSessionActive, + getActiveClaudeSDKSessions, + resolveToolApproval, + getPendingApprovalsForSession, + reconnectSessionWriter, +} from './claude-sdk.js'; +import { + spawnCursor, + abortCursorSession, + isCursorSessionActive, + getActiveCursorSessions, +} from './cursor-cli.js'; +import { + queryCodex, + abortCodexSession, + isCodexSessionActive, + getActiveCodexSessions, +} from './openai-codex.js'; +import { + spawnGemini, + abortGeminiSession, + isGeminiSessionActive, + getActiveGeminiSessions, +} from './gemini-cli.js'; import sessionManager from './sessionManager.js'; +import { + stripAnsiSequences, + normalizeDetectedUrl, + extractUrlsFromText, + shouldAutoOpenUrlFromOutput, +} from './utils/url-detection.js'; import gitRoutes from './routes/git.js'; import authRoutes from './routes/auth.js'; import cursorRoutes from './routes/cursor.js'; @@ -42,210 +60,64 @@ import mcpUtilsRoutes from './routes/mcp-utils.js'; import commandsRoutes from './routes/commands.js'; import settingsRoutes from './routes/settings.js'; import agentRoutes from './routes/agent.js'; -import projectsRoutes, { WORKSPACES_ROOT, validateWorkspacePath } from './routes/projects.js'; +import projectModuleRoutes from './modules/projects/projects.routes.js'; import userRoutes from './routes/user.js'; -import codexRoutes from './routes/codex.js'; import geminiRoutes from './routes/gemini.js'; import pluginsRoutes from './routes/plugins.js'; -import messagesRoutes from './routes/messages.js'; import providerRoutes from './modules/providers/provider.routes.js'; import { startEnabledPluginServers, stopAllPlugins, 
getPluginPort } from './utils/plugin-process-manager.js'; -import { initializeDatabase, sessionNamesDb, applyCustomSessionNames } from './database/db.js'; +import { initializeDatabase, projectsDb } from './modules/database/index.js'; import { configureWebPush } from './services/vapid-keys.js'; import { validateApiKey, authenticateToken, authenticateWebSocket } from './middleware/auth.js'; import { IS_PLATFORM } from './constants/config.js'; -import { getConnectableHost } from '../shared/networkHosts.js'; +import { c } from './utils/colors.js'; -const VALID_PROVIDERS = ['claude', 'codex', 'cursor', 'gemini']; - -// File system watchers for provider project/session folders -const PROVIDER_WATCH_PATHS = [ - { provider: 'claude', rootPath: path.join(os.homedir(), '.claude', 'projects') }, - { provider: 'cursor', rootPath: path.join(os.homedir(), '.cursor', 'chats') }, - { provider: 'codex', rootPath: path.join(os.homedir(), '.codex', 'sessions') }, - { provider: 'gemini', rootPath: path.join(os.homedir(), '.gemini', 'projects') }, - { provider: 'gemini_sessions', rootPath: path.join(os.homedir(), '.gemini', 'sessions') } -]; -const WATCHER_IGNORED_PATTERNS = [ - '**/node_modules/**', - '**/.git/**', - '**/dist/**', - '**/build/**', - '**/*.tmp', - '**/*.swp', - '**/.DS_Store' -]; -const WATCHER_DEBOUNCE_MS = 300; -let projectsWatchers = []; -let projectsWatcherDebounceTimer = null; -const connectedClients = new Set(); -let isGetProjectsRunning = false; // Flag to prevent reentrant calls - -// Broadcast progress to all connected WebSocket clients -function broadcastProgress(progress) { - const message = JSON.stringify({ - type: 'loading_progress', - ...progress - }); - connectedClients.forEach(client => { - if (client.readyState === WebSocket.OPEN) { - client.send(message); - } - }); -} - -// Setup file system watchers for Claude, Cursor, and Codex project/session folders -async function setupProjectsWatcher() { - const chokidar = (await import('chokidar')).default; - - if (projectsWatcherDebounceTimer) { - clearTimeout(projectsWatcherDebounceTimer); - projectsWatcherDebounceTimer = null; - } - - await Promise.all( - projectsWatchers.map(async (watcher) => { - try { - await watcher.close(); - } catch (error) { - console.error('[WARN] Failed to close watcher:', error); - } - }) - ); - projectsWatchers = []; - - const debouncedUpdate = (eventType, filePath, provider, rootPath) => { - if (projectsWatcherDebounceTimer) { - clearTimeout(projectsWatcherDebounceTimer); - } - - projectsWatcherDebounceTimer = setTimeout(async () => { - // Prevent reentrant calls - if (isGetProjectsRunning) { - return; - } - - try { - isGetProjectsRunning = true; - - // Clear project directory cache when files change - clearProjectDirectoryCache(); - - // Get updated projects list - const updatedProjects = await getProjects(broadcastProgress); - - // Notify all connected clients about the project changes - const updateMessage = JSON.stringify({ - type: 'projects_updated', - projects: updatedProjects, - timestamp: new Date().toISOString(), - changeType: eventType, - changedFile: path.relative(rootPath, filePath), - watchProvider: provider - }); - - connectedClients.forEach(client => { - if (client.readyState === WebSocket.OPEN) { - client.send(updateMessage); - } - }); - - } catch (error) { - console.error('[ERROR] Error handling project changes:', error); - } finally { - isGetProjectsRunning = false; - } - }, WATCHER_DEBOUNCE_MS); - }; - - for (const { provider, rootPath } of PROVIDER_WATCH_PATHS) { - try { - // chokidar 
v4 emits ENOENT via the "error" event for missing roots and will not auto-recover. - // Ensure provider folders exist before creating the watcher so watching stays active. - await fsPromises.mkdir(rootPath, { recursive: true }); - - // Initialize chokidar watcher with optimized settings - const watcher = chokidar.watch(rootPath, { - ignored: WATCHER_IGNORED_PATTERNS, - persistent: true, - ignoreInitial: true, // Don't fire events for existing files on startup - followSymlinks: false, - depth: 10, // Reasonable depth limit - awaitWriteFinish: { - stabilityThreshold: 100, // Wait 100ms for file to stabilize - pollInterval: 50 - } - }); - - // Set up event listeners - watcher - .on('add', (filePath) => debouncedUpdate('add', filePath, provider, rootPath)) - .on('change', (filePath) => debouncedUpdate('change', filePath, provider, rootPath)) - .on('unlink', (filePath) => debouncedUpdate('unlink', filePath, provider, rootPath)) - .on('addDir', (dirPath) => debouncedUpdate('addDir', dirPath, provider, rootPath)) - .on('unlinkDir', (dirPath) => debouncedUpdate('unlinkDir', dirPath, provider, rootPath)) - .on('error', (error) => { - console.error(`[ERROR] ${provider} watcher error:`, error); - }) - .on('ready', () => { - }); - - projectsWatchers.push(watcher); - } catch (error) { - console.error(`[ERROR] Failed to setup ${provider} watcher for ${rootPath}:`, error); - } - } - - if (projectsWatchers.length === 0) { - console.error('[ERROR] Failed to setup any provider watchers'); - } -} +const __dirname = getModuleDir(import.meta.url); +// The server source runs from /server, while the compiled output runs from /dist-server/server. +// Resolving the app root once keeps every repo-level lookup below aligned across both layouts. +const APP_ROOT = findAppRoot(__dirname); +const installMode = fs.existsSync(path.join(APP_ROOT, '.git')) ? 'git' : 'npm'; +console.log('SERVER_PORT from env:', process.env.SERVER_PORT); const app = express(); const server = http.createServer(app); -const ptySessionsMap = new Map(); -const PTY_SESSION_TIMEOUT = 30 * 60 * 1000; -const SHELL_URL_PARSE_BUFFER_LIMIT = 32768; -import { stripAnsiSequences, normalizeDetectedUrl, extractUrlsFromText, shouldAutoOpenUrlFromOutput } from './utils/url-detection.js'; - -// Single WebSocket server that handles both paths -const wss = new WebSocketServer({ - server, - verifyClient: (info) => { - console.log('WebSocket connection attempt to:', info.req.url); - - // Platform mode: always allow connection - if (IS_PLATFORM) { - const user = authenticateWebSocket(null); // Will return first user - if (!user) { - console.log('[WARN] Platform mode: No user found in database'); - return false; - } - info.req.user = user; - console.log('[OK] Platform mode WebSocket authenticated for user:', user.username); - return true; - } - - // Normal mode: verify token - // Extract token from query parameters or headers - const url = new URL(info.req.url, 'http://localhost'); - const token = url.searchParams.get('token') || - info.req.headers.authorization?.split(' ')[1]; - - // Verify token - const user = authenticateWebSocket(token); - if (!user) { - console.log('[WARN] WebSocket authentication failed'); - return false; - } - - // Store user info in the request for later use - info.req.user = user; - console.log('[OK] WebSocket authenticated for user:', user.username); - return true; - } +// Single WebSocket server that handles chat, shell, and plugin proxy paths. 
+const wss = createWebSocketServer(server, { + verifyClient: { + isPlatform: IS_PLATFORM, + authenticateWebSocket, + }, + chat: { + queryClaudeSDK, + spawnCursor, + queryCodex, + spawnGemini, + abortClaudeSDKSession, + abortCursorSession, + abortCodexSession, + abortGeminiSession, + resolveToolApproval, + isClaudeSDKSessionActive, + isCursorSessionActive, + isCodexSessionActive, + isGeminiSessionActive, + reconnectSessionWriter, + getPendingApprovalsForSession, + getActiveClaudeSDKSessions, + getActiveCursorSessions, + getActiveCodexSessions, + getActiveGeminiSessions, + }, + shell: { + getSessionById: (sessionId) => sessionManager.getSession(sessionId), + stripAnsiSequences, + normalizeDetectedUrl, + extractUrlsFromText, + shouldAutoOpenUrlFromOutput, + }, + getPluginPort, }); // Make WebSocket server available to routes @@ -281,7 +153,7 @@ app.use('/api', validateApiKey); app.use('/api/auth', authRoutes); // Projects API Routes (protected) -app.use('/api/projects', authenticateToken, projectsRoutes); +app.use('/api/projects', authenticateToken, projectModuleRoutes); // Git API Routes (protected) app.use('/api/git', authenticateToken, gitRoutes); @@ -304,18 +176,12 @@ app.use('/api/settings', authenticateToken, settingsRoutes); // User API Routes (protected) app.use('/api/user', authenticateToken, userRoutes); -// Codex API Routes (protected) -app.use('/api/codex', authenticateToken, codexRoutes); - // Gemini API Routes (protected) app.use('/api/gemini', authenticateToken, geminiRoutes); // Plugins API Routes (protected) app.use('/api/plugins', authenticateToken, pluginsRoutes); -// Unified session messages route (protected) -app.use('/api/sessions', authenticateToken, messagesRoutes); - // Unified provider MCP routes (protected) app.use('/api/providers', authenticateToken, providerRoutes); @@ -419,138 +285,6 @@ app.post('/api/system/update', authenticateToken, async (req, res) => { } }); -app.get('/api/projects', authenticateToken, async (req, res) => { - try { - const projects = await getProjects(broadcastProgress); - res.json(projects); - } catch (error) { - res.status(500).json({ error: error.message }); - } -}); - -app.get('/api/projects/:projectName/sessions', authenticateToken, async (req, res) => { - try { - const { limit = 5, offset = 0 } = req.query; - const result = await getSessions(req.params.projectName, parseInt(limit), parseInt(offset)); - applyCustomSessionNames(result.sessions, 'claude'); - res.json(result); - } catch (error) { - res.status(500).json({ error: error.message }); - } -}); - -// Rename project endpoint -app.put('/api/projects/:projectName/rename', authenticateToken, async (req, res) => { - try { - const { displayName } = req.body; - await renameProject(req.params.projectName, displayName); - res.json({ success: true }); - } catch (error) { - res.status(500).json({ error: error.message }); - } -}); - -// Delete session endpoint -app.delete('/api/projects/:projectName/sessions/:sessionId', authenticateToken, async (req, res) => { - try { - const { projectName, sessionId } = req.params; - console.log(`[API] Deleting session: ${sessionId} from project: ${projectName}`); - await deleteSession(projectName, sessionId); - sessionNamesDb.deleteName(sessionId, 'claude'); - console.log(`[API] Session ${sessionId} deleted successfully`); - res.json({ success: true }); - } catch (error) { - console.error(`[API] Error deleting session ${req.params.sessionId}:`, error); - res.status(500).json({ error: error.message }); - } -}); - -// Rename session endpoint 
-app.put('/api/sessions/:sessionId/rename', authenticateToken, async (req, res) => { - try { - const { sessionId } = req.params; - const safeSessionId = String(sessionId).replace(/[^a-zA-Z0-9._-]/g, ''); - if (!safeSessionId || safeSessionId !== String(sessionId)) { - return res.status(400).json({ error: 'Invalid sessionId' }); - } - const { summary, provider } = req.body; - if (!summary || typeof summary !== 'string' || summary.trim() === '') { - return res.status(400).json({ error: 'Summary is required' }); - } - if (summary.trim().length > 500) { - return res.status(400).json({ error: 'Summary must not exceed 500 characters' }); - } - if (!provider || !VALID_PROVIDERS.includes(provider)) { - return res.status(400).json({ error: `Provider must be one of: ${VALID_PROVIDERS.join(', ')}` }); - } - sessionNamesDb.setName(safeSessionId, provider, summary.trim()); - res.json({ success: true }); - } catch (error) { - console.error(`[API] Error renaming session ${req.params.sessionId}:`, error); - res.status(500).json({ error: error.message }); - } -}); - -// Delete project endpoint -// force=true to allow removal even when sessions exist -// deleteData=true to also delete session/memory files on disk (destructive) -app.delete('/api/projects/:projectName', authenticateToken, async (req, res) => { - try { - const { projectName } = req.params; - const force = req.query.force === 'true'; - const deleteData = req.query.deleteData === 'true'; - await deleteProject(projectName, force, deleteData); - res.json({ success: true }); - } catch (error) { - res.status(500).json({ error: error.message }); - } -}); - -// Search conversations content (SSE streaming) -app.get('/api/search/conversations', authenticateToken, async (req, res) => { - const query = typeof req.query.q === 'string' ? req.query.q.trim() : ''; - const parsedLimit = Number.parseInt(String(req.query.limit), 10); - const limit = Number.isNaN(parsedLimit) ? 
50 : Math.max(1, Math.min(parsedLimit, 100)); - - if (query.length < 2) { - return res.status(400).json({ error: 'Query must be at least 2 characters' }); - } - - res.writeHead(200, { - 'Content-Type': 'text/event-stream', - 'Cache-Control': 'no-cache', - 'Connection': 'keep-alive', - 'X-Accel-Buffering': 'no', - }); - - let closed = false; - const abortController = new AbortController(); - req.on('close', () => { closed = true; abortController.abort(); }); - - try { - await searchConversations(query, limit, ({ projectResult, totalMatches, scannedProjects, totalProjects }) => { - if (closed) return; - if (projectResult) { - res.write(`event: result\ndata: ${JSON.stringify({ projectResult, totalMatches, scannedProjects, totalProjects })}\n\n`); - } else { - res.write(`event: progress\ndata: ${JSON.stringify({ totalMatches, scannedProjects, totalProjects })}\n\n`); - } - }, abortController.signal); - if (!closed) { - res.write(`event: done\ndata: {}\n\n`); - } - } catch (error) { - console.error('Error searching conversations:', error); - if (!closed) { - res.write(`event: error\ndata: ${JSON.stringify({ error: 'Search failed' })}\n\n`); - } - } finally { - if (!closed) { - res.end(); - } - } -}); - const expandWorkspacePath = (inputPath) => { if (!inputPath) return inputPath; if (inputPath === '~') { @@ -684,9 +418,9 @@ app.post('/api/create-folder', authenticateToken, async (req, res) => { }); // Read file content endpoint -app.get('/api/projects/:projectName/file', authenticateToken, async (req, res) => { +app.get('/api/projects/:projectId/file', authenticateToken, async (req, res) => { try { - const { projectName } = req.params; + const { projectId } = req.params; const { filePath } = req.query; @@ -695,7 +429,9 @@ app.get('/api/projects/:projectName/file', authenticateToken, async (req, res) = return res.status(400).json({ error: 'Invalid file path' }); } - const projectRoot = await extractProjectDirectory(projectName).catch(() => null); + // Resolve the absolute project root via the DB-backed helper; the + // caller passes the DB-assigned `projectId`, not a folder name. + const projectRoot = await projectsDb.getProjectPathById(projectId); if (!projectRoot) { return res.status(404).json({ error: 'Project not found' }); } @@ -724,9 +460,9 @@ app.get('/api/projects/:projectName/file', authenticateToken, async (req, res) = }); // Serve raw file bytes for previews and downloads. -app.get('/api/projects/:projectName/files/content', authenticateToken, async (req, res) => { +app.get('/api/projects/:projectId/files/content', authenticateToken, async (req, res) => { try { - const { projectName } = req.params; + const { projectId } = req.params; const { path: filePath } = req.query; @@ -735,7 +471,8 @@ app.get('/api/projects/:projectName/files/content', authenticateToken, async (re return res.status(400).json({ error: 'Invalid file path' }); } - const projectRoot = await extractProjectDirectory(projectName).catch(() => null); + // Projects are now addressed by DB `projectId`, resolved to their path here. 
+ const projectRoot = await projectsDb.getProjectPathById(projectId); if (!projectRoot) { return res.status(404).json({ error: 'Project not found' }); } @@ -781,9 +518,9 @@ app.get('/api/projects/:projectName/files/content', authenticateToken, async (re }); // Save file content endpoint -app.put('/api/projects/:projectName/file', authenticateToken, async (req, res) => { +app.put('/api/projects/:projectId/file', authenticateToken, async (req, res) => { try { - const { projectName } = req.params; + const { projectId } = req.params; const { filePath, content } = req.body; @@ -796,7 +533,8 @@ app.put('/api/projects/:projectName/file', authenticateToken, async (req, res) = return res.status(400).json({ error: 'Content is required' }); } - const projectRoot = await extractProjectDirectory(projectName).catch(() => null); + // Projects are now addressed by DB `projectId`, resolved to their path here. + const projectRoot = await projectsDb.getProjectPathById(projectId); if (!projectRoot) { return res.status(404).json({ error: 'Project not found' }); } @@ -830,19 +568,16 @@ app.put('/api/projects/:projectName/file', authenticateToken, async (req, res) = } }); -app.get('/api/projects/:projectName/files', authenticateToken, async (req, res) => { +app.get('/api/projects/:projectId/files', authenticateToken, async (req, res) => { try { // Using fsPromises from import - // Use extractProjectDirectory to get the actual project path - let actualPath; - try { - actualPath = await extractProjectDirectory(req.params.projectName); - } catch (error) { - console.error('Error extracting project directory:', error); - // Fallback to simple dash replacement - actualPath = req.params.projectName.replace(/-/g, '/'); + // Resolve the project's absolute path through the DB (projectId is the + // primary key of the `projects` table after the identifier migration). + const actualPath = await projectsDb.getProjectPathById(req.params.projectId); + if (!actualPath) { + return res.status(404).json({ error: 'Project not found' }); } // Check if path exists @@ -907,10 +642,10 @@ function validateFilename(name) { return { valid: true }; } -// POST /api/projects/:projectName/files/create - Create new file or directory -app.post('/api/projects/:projectName/files/create', authenticateToken, async (req, res) => { +// POST /api/projects/:projectId/files/create - Create new file or directory +app.post('/api/projects/:projectId/files/create', authenticateToken, async (req, res) => { try { - const { projectName } = req.params; + const { projectId } = req.params; const { path: parentPath, type, name } = req.body; // Validate input @@ -927,8 +662,8 @@ app.post('/api/projects/:projectName/files/create', authenticateToken, async (re return res.status(400).json({ error: nameValidation.error }); } - // Get project root - const projectRoot = await extractProjectDirectory(projectName).catch(() => null); + // Resolve the project directory through the DB using the new projectId. 
+ const projectRoot = await projectsDb.getProjectPathById(projectId); if (!projectRoot) { return res.status(404).json({ error: 'Project not found' }); } @@ -984,10 +719,10 @@ app.post('/api/projects/:projectName/files/create', authenticateToken, async (re } }); -// PUT /api/projects/:projectName/files/rename - Rename file or directory -app.put('/api/projects/:projectName/files/rename', authenticateToken, async (req, res) => { +// PUT /api/projects/:projectId/files/rename - Rename file or directory +app.put('/api/projects/:projectId/files/rename', authenticateToken, async (req, res) => { try { - const { projectName } = req.params; + const { projectId } = req.params; const { oldPath, newName } = req.body; // Validate input @@ -1000,8 +735,8 @@ app.put('/api/projects/:projectName/files/rename', authenticateToken, async (req return res.status(400).json({ error: nameValidation.error }); } - // Get project root - const projectRoot = await extractProjectDirectory(projectName).catch(() => null); + // Resolve the project directory through the DB using the new projectId. + const projectRoot = await projectsDb.getProjectPathById(projectId); if (!projectRoot) { return res.status(404).json({ error: 'Project not found' }); } @@ -1061,10 +796,10 @@ app.put('/api/projects/:projectName/files/rename', authenticateToken, async (req } }); -// DELETE /api/projects/:projectName/files - Delete file or directory -app.delete('/api/projects/:projectName/files', authenticateToken, async (req, res) => { +// DELETE /api/projects/:projectId/files - Delete file or directory +app.delete('/api/projects/:projectId/files', authenticateToken, async (req, res) => { try { - const { projectName } = req.params; + const { projectId } = req.params; const { path: targetPath, type } = req.body; // Validate input @@ -1072,8 +807,8 @@ app.delete('/api/projects/:projectName/files', authenticateToken, async (req, re return res.status(400).json({ error: 'Path is required' }); } - // Get project root - const projectRoot = await extractProjectDirectory(projectName).catch(() => null); + // Resolve the project directory through the DB using the new projectId. + const projectRoot = await projectsDb.getProjectPathById(projectId); if (!projectRoot) { return res.status(404).json({ error: 'Project not found' }); } @@ -1126,7 +861,7 @@ app.delete('/api/projects/:projectName/files', authenticateToken, async (req, re } }); -// POST /api/projects/:projectName/files/upload - Upload files +// POST /api/projects/:projectId/files/upload - Upload files // Dynamic import of multer for file uploads const uploadFilesHandler = async (req, res) => { // Dynamic import of multer @@ -1165,7 +900,7 @@ const uploadFilesHandler = async (req, res) => { } try { - const { projectName } = req.params; + const { projectId } = req.params; const { targetPath, relativePaths } = req.body; // Parse relative paths if provided (for folder uploads) @@ -1179,7 +914,7 @@ const uploadFilesHandler = async (req, res) => { } console.log('[DEBUG] File upload request:', { - projectName, + projectId, targetPath: JSON.stringify(targetPath), targetPathType: typeof targetPath, filesCount: req.files?.length, @@ -1190,8 +925,8 @@ const uploadFilesHandler = async (req, res) => { return res.status(400).json({ error: 'No files provided' }); } - // Get project root - const projectRoot = await extractProjectDirectory(projectName).catch(() => null); + // Resolve the project directory through the DB using the new projectId. 
+ const projectRoot = await projectsDb.getProjectPathById(projectId); if (!projectRoot) { return res.status(404).json({ error: 'Project not found' }); } @@ -1288,615 +1023,12 @@ const uploadFilesHandler = async (req, res) => { }); }; -app.post('/api/projects/:projectName/files/upload', authenticateToken, uploadFilesHandler); +app.post('/api/projects/:projectId/files/upload', authenticateToken, uploadFilesHandler); -/** - * Proxy an authenticated client WebSocket to a plugin's internal WS server. - * Auth is enforced by verifyClient before this function is reached. - */ -function handlePluginWsProxy(clientWs, pathname) { - const pluginName = pathname.replace('/plugin-ws/', ''); - if (!pluginName || /[^a-zA-Z0-9_-]/.test(pluginName)) { - clientWs.close(4400, 'Invalid plugin name'); - return; - } - - const port = getPluginPort(pluginName); - if (!port) { - clientWs.close(4404, 'Plugin not running'); - return; - } - - const upstream = new WebSocket(`ws://127.0.0.1:${port}/ws`); - - upstream.on('open', () => { - console.log(`[Plugins] WS proxy connected to "${pluginName}" on port ${port}`); - }); - - // Relay messages bidirectionally - upstream.on('message', (data) => { - if (clientWs.readyState === WebSocket.OPEN) clientWs.send(data); - }); - clientWs.on('message', (data) => { - if (upstream.readyState === WebSocket.OPEN) upstream.send(data); - }); - - // Propagate close in both directions - upstream.on('close', () => { if (clientWs.readyState === WebSocket.OPEN) clientWs.close(); }); - clientWs.on('close', () => { if (upstream.readyState === WebSocket.OPEN) upstream.close(); }); - - upstream.on('error', (err) => { - console.error(`[Plugins] WS proxy error for "${pluginName}":`, err.message); - if (clientWs.readyState === WebSocket.OPEN) clientWs.close(4502, 'Upstream error'); - }); - clientWs.on('error', () => { - if (upstream.readyState === WebSocket.OPEN) upstream.close(); - }); -} - -// WebSocket connection handler that routes based on URL path -wss.on('connection', (ws, request) => { - const url = request.url; - console.log('[INFO] Client connected to:', url); - - // Parse URL to get pathname without query parameters - const urlObj = new URL(url, 'http://localhost'); - const pathname = urlObj.pathname; - - if (pathname === '/shell') { - handleShellConnection(ws); - } else if (pathname === '/ws') { - handleChatConnection(ws, request); - } else if (pathname.startsWith('/plugin-ws/')) { - handlePluginWsProxy(ws, pathname); - } else { - console.log('[WARN] Unknown WebSocket path:', pathname); - ws.close(); - } -}); - -/** - * WebSocket Writer - Wrapper for WebSocket to match SSEStreamWriter interface - * - * Provider files use `createNormalizedMessage()` from `shared/utils.js` and - * adapter `normalizeMessage()` to produce unified NormalizedMessage events. - * The writer simply serialises and sends. 
- */ -class WebSocketWriter { - constructor(ws, userId = null) { - this.ws = ws; - this.sessionId = null; - this.userId = userId; - this.isWebSocketWriter = true; // Marker for transport detection - } - - send(data) { - if (this.ws.readyState === 1) { // WebSocket.OPEN - this.ws.send(JSON.stringify(data)); - } - } - - updateWebSocket(newRawWs) { - this.ws = newRawWs; - } - - setSessionId(sessionId) { - this.sessionId = sessionId; - } - - getSessionId() { - return this.sessionId; - } -} - -// Handle chat WebSocket connections -function handleChatConnection(ws, request) { - console.log('[INFO] Chat WebSocket connected'); - - // Add to connected clients for project updates - connectedClients.add(ws); - - // Wrap WebSocket with writer for consistent interface with SSEStreamWriter - const writer = new WebSocketWriter(ws, request?.user?.id ?? request?.user?.userId ?? null); - - ws.on('message', async (message) => { - try { - const data = JSON.parse(message); - - if (data.type === 'claude-command') { - console.log('[DEBUG] User message:', data.command || '[Continue/Resume]'); - console.log('📁 Project:', data.options?.projectPath || 'Unknown'); - console.log('🔄 Session:', data.options?.sessionId ? 'Resume' : 'New'); - - // Use Claude Agents SDK - await queryClaudeSDK(data.command, data.options, writer); - } else if (data.type === 'cursor-command') { - console.log('[DEBUG] Cursor message:', data.command || '[Continue/Resume]'); - console.log('📁 Project:', data.options?.cwd || 'Unknown'); - console.log('🔄 Session:', data.options?.sessionId ? 'Resume' : 'New'); - console.log('🤖 Model:', data.options?.model || 'default'); - await spawnCursor(data.command, data.options, writer); - } else if (data.type === 'codex-command') { - console.log('[DEBUG] Codex message:', data.command || '[Continue/Resume]'); - console.log('📁 Project:', data.options?.projectPath || data.options?.cwd || 'Unknown'); - console.log('🔄 Session:', data.options?.sessionId ? 'Resume' : 'New'); - console.log('🤖 Model:', data.options?.model || 'default'); - await queryCodex(data.command, data.options, writer); - } else if (data.type === 'gemini-command') { - console.log('[DEBUG] Gemini message:', data.command || '[Continue/Resume]'); - console.log('📁 Project:', data.options?.projectPath || data.options?.cwd || 'Unknown'); - console.log('🔄 Session:', data.options?.sessionId ? 'Resume' : 'New'); - console.log('🤖 Model:', data.options?.model || 'default'); - await spawnGemini(data.command, data.options, writer); - } else if (data.type === 'cursor-resume') { - // Backward compatibility: treat as cursor-command with resume and no prompt - console.log('[DEBUG] Cursor resume session (compat):', data.sessionId); - await spawnCursor('', { - sessionId: data.sessionId, - resume: true, - cwd: data.options?.cwd - }, writer); - } else if (data.type === 'abort-session') { - console.log('[DEBUG] Abort session request:', data.sessionId); - const provider = data.provider || 'claude'; - let success; - - if (provider === 'cursor') { - success = abortCursorSession(data.sessionId); - } else if (provider === 'codex') { - success = abortCodexSession(data.sessionId); - } else if (provider === 'gemini') { - success = abortGeminiSession(data.sessionId); - } else { - // Use Claude Agents SDK - success = await abortClaudeSDKSession(data.sessionId); - } - - writer.send(createNormalizedMessage({ kind: 'complete', exitCode: success ? 
0 : 1, aborted: true, success, sessionId: data.sessionId, provider })); - } else if (data.type === 'claude-permission-response') { - // Relay UI approval decisions back into the SDK control flow. - // This does not persist permissions; it only resolves the in-flight request, - // introduced so the SDK can resume once the user clicks Allow/Deny. - if (data.requestId) { - resolveToolApproval(data.requestId, { - allow: Boolean(data.allow), - updatedInput: data.updatedInput, - message: data.message, - rememberEntry: data.rememberEntry - }); - } - } else if (data.type === 'cursor-abort') { - console.log('[DEBUG] Abort Cursor session:', data.sessionId); - const success = abortCursorSession(data.sessionId); - writer.send(createNormalizedMessage({ kind: 'complete', exitCode: success ? 0 : 1, aborted: true, success, sessionId: data.sessionId, provider: 'cursor' })); - } else if (data.type === 'check-session-status') { - // Check if a specific session is currently processing - const provider = data.provider || 'claude'; - const sessionId = data.sessionId; - let isActive; - - if (provider === 'cursor') { - isActive = isCursorSessionActive(sessionId); - } else if (provider === 'codex') { - isActive = isCodexSessionActive(sessionId); - } else if (provider === 'gemini') { - isActive = isGeminiSessionActive(sessionId); - } else { - // Use Claude Agents SDK - isActive = isClaudeSDKSessionActive(sessionId); - if (isActive) { - // Reconnect the session's writer to the new WebSocket so - // subsequent SDK output flows to the refreshed client. - reconnectSessionWriter(sessionId, ws); - } - } - - writer.send({ - type: 'session-status', - sessionId, - provider, - isProcessing: isActive - }); - } else if (data.type === 'get-pending-permissions') { - // Return pending permission requests for a session - const sessionId = data.sessionId; - if (sessionId && isClaudeSDKSessionActive(sessionId)) { - const pending = getPendingApprovalsForSession(sessionId); - writer.send({ - type: 'pending-permissions-response', - sessionId, - data: pending - }); - } - } else if (data.type === 'get-active-sessions') { - // Get all currently active sessions - const activeSessions = { - claude: getActiveClaudeSDKSessions(), - cursor: getActiveCursorSessions(), - codex: getActiveCodexSessions(), - gemini: getActiveGeminiSessions() - }; - writer.send({ - type: 'active-sessions', - sessions: activeSessions - }); - } - } catch (error) { - console.error('[ERROR] Chat WebSocket error:', error.message); - writer.send({ - type: 'error', - error: error.message - }); - } - }); - - ws.on('close', () => { - console.log('🔌 Chat client disconnected'); - // Remove from connected clients - connectedClients.delete(ws); - }); -} - -// Handle shell WebSocket connections -function handleShellConnection(ws) { - console.log('🐚 Shell client connected'); - let shellProcess = null; - let ptySessionKey = null; - let urlDetectionBuffer = ''; - const announcedAuthUrls = new Set(); - - ws.on('message', async (message) => { - try { - const data = JSON.parse(message); - console.log('📨 Shell message received:', data.type); - - if (data.type === 'init') { - const projectPath = data.projectPath || process.cwd(); - const sessionId = data.sessionId; - const hasSession = data.hasSession; - const provider = data.provider || 'claude'; - const initialCommand = data.initialCommand; - const isPlainShell = data.isPlainShell || (!!initialCommand && !hasSession) || provider === 'plain-shell'; - urlDetectionBuffer = ''; - announcedAuthUrls.clear(); - - // Login commands 
(Claude/Cursor auth) should never reuse cached sessions - const isLoginCommand = initialCommand && ( - initialCommand.includes('setup-token') || - initialCommand.includes('cursor-agent login') || - initialCommand.includes('auth login') - ); - - // Include command hash in session key so different commands get separate sessions - const commandSuffix = isPlainShell && initialCommand - ? `_cmd_${Buffer.from(initialCommand).toString('base64').slice(0, 16)}` - : ''; - ptySessionKey = `${projectPath}_${sessionId || 'default'}${commandSuffix}`; - - // Kill any existing login session before starting fresh - if (isLoginCommand) { - const oldSession = ptySessionsMap.get(ptySessionKey); - if (oldSession) { - console.log('🧹 Cleaning up existing login session:', ptySessionKey); - if (oldSession.timeoutId) clearTimeout(oldSession.timeoutId); - if (oldSession.pty && oldSession.pty.kill) oldSession.pty.kill(); - ptySessionsMap.delete(ptySessionKey); - } - } - - const existingSession = isLoginCommand ? null : ptySessionsMap.get(ptySessionKey); - if (existingSession) { - console.log('♻️ Reconnecting to existing PTY session:', ptySessionKey); - shellProcess = existingSession.pty; - - clearTimeout(existingSession.timeoutId); - - ws.send(JSON.stringify({ - type: 'output', - data: `\x1b[36m[Reconnected to existing session]\x1b[0m\r\n` - })); - - if (existingSession.buffer && existingSession.buffer.length > 0) { - console.log(`📜 Sending ${existingSession.buffer.length} buffered messages`); - existingSession.buffer.forEach(bufferedData => { - ws.send(JSON.stringify({ - type: 'output', - data: bufferedData - })); - }); - } - - existingSession.ws = ws; - - return; - } - - console.log('[INFO] Starting shell in:', projectPath); - console.log('📋 Session info:', hasSession ? `Resume session ${sessionId}` : (isPlainShell ? 'Plain shell mode' : 'New session')); - console.log('🤖 Provider:', isPlainShell ? 'plain-shell' : provider); - if (initialCommand) { - console.log('⚡ Initial command:', initialCommand); - } - - // First send a welcome message - let welcomeMsg; - if (isPlainShell) { - welcomeMsg = `\x1b[36mStarting terminal in: ${projectPath}\x1b[0m\r\n`; - } else { - const providerName = provider === 'cursor' ? 'Cursor' : (provider === 'codex' ? 'Codex' : (provider === 'gemini' ? 'Gemini' : 'Claude')); - welcomeMsg = hasSession ? 
- `\x1b[36mResuming ${providerName} session ${sessionId} in: ${projectPath}\x1b[0m\r\n` : - `\x1b[36mStarting new ${providerName} session in: ${projectPath}\x1b[0m\r\n`; - } - - ws.send(JSON.stringify({ - type: 'output', - data: welcomeMsg - })); - - try { - // Validate projectPath — resolve to absolute and verify it exists - const resolvedProjectPath = path.resolve(projectPath); - try { - const stats = fs.statSync(resolvedProjectPath); - if (!stats.isDirectory()) { - throw new Error('Not a directory'); - } - } catch (pathErr) { - ws.send(JSON.stringify({ type: 'error', message: 'Invalid project path' })); - return; - } - - // Validate sessionId — only allow safe characters - const safeSessionIdPattern = /^[a-zA-Z0-9_.\-:]+$/; - if (sessionId && !safeSessionIdPattern.test(sessionId)) { - ws.send(JSON.stringify({ type: 'error', message: 'Invalid session ID' })); - return; - } - - // Build shell command — use cwd for project path (never interpolate into shell string) - let shellCommand; - if (isPlainShell) { - // Plain shell mode - run the initial command in the project directory - shellCommand = initialCommand; - } else if (provider === 'cursor') { - if (hasSession && sessionId) { - shellCommand = `cursor-agent --resume="${sessionId}"`; - } else { - shellCommand = 'cursor-agent'; - } - } else if (provider === 'codex') { - // Use codex command; attempt to resume and fall back to a new session when the resume fails. - if (hasSession && sessionId) { - if (os.platform() === 'win32') { - // PowerShell syntax for fallback - shellCommand = `codex resume "${sessionId}"; if ($LASTEXITCODE -ne 0) { codex }`; - } else { - shellCommand = `codex resume "${sessionId}" || codex`; - } - } else { - shellCommand = 'codex'; - } - } else if (provider === 'gemini') { - const command = initialCommand || 'gemini'; - let resumeId = sessionId; - if (hasSession && sessionId) { - try { - // Gemini CLI enforces its own native session IDs, unlike other agents that accept arbitrary string names. - // The UI only knows about its internal generated `sessionId` (e.g. gemini_1234). - // We must fetch the mapping from the backend session manager to pass the native `cliSessionId` to the shell. - const sess = sessionManager.getSession(sessionId); - if (sess && sess.cliSessionId) { - resumeId = sess.cliSessionId; - // Validate the looked-up CLI session ID too - if (!safeSessionIdPattern.test(resumeId)) { - resumeId = null; - } - } - } catch (err) { - console.error('Failed to get Gemini CLI session ID:', err); - } - } - - if (hasSession && resumeId) { - shellCommand = `${command} --resume "${resumeId}"`; - } else { - shellCommand = command; - } - } else { - // Claude (default provider) - const command = initialCommand || 'claude'; - if (hasSession && sessionId) { - if (os.platform() === 'win32') { - shellCommand = `claude --resume "${sessionId}"; if ($LASTEXITCODE -ne 0) { claude }`; - } else { - shellCommand = `claude --resume "${sessionId}" || claude`; - } - } else { - shellCommand = command; - } - } - - console.log('🔧 Executing shell command:', shellCommand); - - // Use appropriate shell based on platform - const shell = os.platform() === 'win32' ? 'powershell.exe' : 'bash'; - const shellArgs = os.platform() === 'win32' ? 
['-Command', shellCommand] : ['-c', shellCommand]; - - // Use terminal dimensions from client if provided, otherwise use defaults - const termCols = data.cols || 80; - const termRows = data.rows || 24; - console.log('📐 Using terminal dimensions:', termCols, 'x', termRows); - - shellProcess = pty.spawn(shell, shellArgs, { - name: 'xterm-256color', - cols: termCols, - rows: termRows, - cwd: resolvedProjectPath, - env: { - ...process.env, - TERM: 'xterm-256color', - COLORTERM: 'truecolor', - FORCE_COLOR: '3' - } - }); - - console.log('🟢 Shell process started with PTY, PID:', shellProcess.pid); - - ptySessionsMap.set(ptySessionKey, { - pty: shellProcess, - ws: ws, - buffer: [], - timeoutId: null, - projectPath, - sessionId - }); - - // Handle data output - shellProcess.onData((data) => { - const session = ptySessionsMap.get(ptySessionKey); - if (!session) return; - - if (session.buffer.length < 5000) { - session.buffer.push(data); - } else { - session.buffer.shift(); - session.buffer.push(data); - } - - if (session.ws && session.ws.readyState === WebSocket.OPEN) { - let outputData = data; - - const cleanChunk = stripAnsiSequences(data); - urlDetectionBuffer = `${urlDetectionBuffer}${cleanChunk}`.slice(-SHELL_URL_PARSE_BUFFER_LIMIT); - - outputData = outputData.replace( - /OPEN_URL:\s*(https?:\/\/[^\s\x1b\x07]+)/g, - '[INFO] Opening in browser: $1' - ); - - const emitAuthUrl = (detectedUrl, autoOpen = false) => { - const normalizedUrl = normalizeDetectedUrl(detectedUrl); - if (!normalizedUrl) return; - - const isNewUrl = !announcedAuthUrls.has(normalizedUrl); - if (isNewUrl) { - announcedAuthUrls.add(normalizedUrl); - session.ws.send(JSON.stringify({ - type: 'auth_url', - url: normalizedUrl, - autoOpen - })); - } - - }; - - const normalizedDetectedUrls = extractUrlsFromText(urlDetectionBuffer) - .map((url) => normalizeDetectedUrl(url)) - .filter(Boolean); - - // Prefer the most complete URL if shorter prefix variants are also present. - const dedupedDetectedUrls = Array.from(new Set(normalizedDetectedUrls)).filter((url, _, urls) => - !urls.some((otherUrl) => otherUrl !== url && otherUrl.startsWith(url)) - ); - - dedupedDetectedUrls.forEach((url) => emitAuthUrl(url, false)); - - if (shouldAutoOpenUrlFromOutput(cleanChunk) && dedupedDetectedUrls.length > 0) { - const bestUrl = dedupedDetectedUrls.reduce((longest, current) => - current.length > longest.length ? current : longest - ); - emitAuthUrl(bestUrl, true); - } - - // Send regular output - session.ws.send(JSON.stringify({ - type: 'output', - data: outputData - })); - } - }); - - // Handle process exit - shellProcess.onExit((exitCode) => { - console.log('🔚 Shell process exited with code:', exitCode.exitCode, 'signal:', exitCode.signal); - const session = ptySessionsMap.get(ptySessionKey); - if (session && session.ws && session.ws.readyState === WebSocket.OPEN) { - session.ws.send(JSON.stringify({ - type: 'output', - data: `\r\n\x1b[33mProcess exited with code ${exitCode.exitCode}${exitCode.signal ? 
` (${exitCode.signal})` : ''}\x1b[0m\r\n` - })); - } - if (session && session.timeoutId) { - clearTimeout(session.timeoutId); - } - ptySessionsMap.delete(ptySessionKey); - shellProcess = null; - }); - - } catch (spawnError) { - console.error('[ERROR] Error spawning process:', spawnError); - ws.send(JSON.stringify({ - type: 'output', - data: `\r\n\x1b[31mError: ${spawnError.message}\x1b[0m\r\n` - })); - } - - } else if (data.type === 'input') { - // Send input to shell process - if (shellProcess && shellProcess.write) { - try { - shellProcess.write(data.data); - } catch (error) { - console.error('Error writing to shell:', error); - } - } else { - console.warn('No active shell process to send input to'); - } - } else if (data.type === 'resize') { - // Handle terminal resize - if (shellProcess && shellProcess.resize) { - console.log('Terminal resize requested:', data.cols, 'x', data.rows); - shellProcess.resize(data.cols, data.rows); - } - } - } catch (error) { - console.error('[ERROR] Shell WebSocket error:', error.message); - if (ws.readyState === WebSocket.OPEN) { - ws.send(JSON.stringify({ - type: 'output', - data: `\r\n\x1b[31mError: ${error.message}\x1b[0m\r\n` - })); - } - } - }); - - ws.on('close', () => { - console.log('🔌 Shell client disconnected'); - - if (ptySessionKey) { - const session = ptySessionsMap.get(ptySessionKey); - if (session) { - console.log('⏳ PTY session kept alive, will timeout in 30 minutes:', ptySessionKey); - session.ws = null; - - session.timeoutId = setTimeout(() => { - console.log('⏰ PTY session timeout, killing process:', ptySessionKey); - if (session.pty && session.pty.kill) { - session.pty.kill(); - } - ptySessionsMap.delete(ptySessionKey); - }, PTY_SESSION_TIMEOUT); - } - } - }); - - ws.on('error', (error) => { - console.error('[ERROR] Shell WebSocket error:', error); - }); -} -// Image upload endpoint -app.post('/api/projects/:projectName/upload-images', authenticateToken, async (req, res) => { +// Image upload endpoint. Accepts the DB-assigned `projectId` (not a folder name) +// but the current implementation doesn't need to touch the project directory, +// so we just leave the param rename for consistency with the rest of the API. +app.post('/api/projects/:projectId/upload-images', authenticateToken, async (req, res) => { try { const multer = (await import('multer')).default; const path = (await import('path')).default; @@ -1980,10 +1112,11 @@ app.post('/api/projects/:projectName/upload-images', authenticateToken, async (r } }); -// Get token usage for a specific session -app.get('/api/projects/:projectName/sessions/:sessionId/token-usage', authenticateToken, async (req, res) => { +// Get token usage for a specific session. `projectId` is the DB primary key; +// the Claude branch below resolves it to an absolute path via the DB. 
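+// A hypothetical example request (placeholder IDs, for illustration only):
+//   GET /api/projects/<project-uuid>/sessions/<session-id>/token-usage?provider=claude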
+app.get('/api/projects/:projectId/sessions/:sessionId/token-usage', authenticateToken, async (req, res) => {
   try {
-    const { projectName, sessionId } = req.params;
+    const { projectId, sessionId } = req.params;
     const { provider = 'claude' } = req.query;
     const homeDir = os.homedir();
@@ -2087,13 +1220,13 @@ app.get('/api/projects/:projectName/sessions/:sessionId/token-usage', authentica
     }
 
     // Handle Claude sessions (default)
-    // Extract actual project path
-    let projectPath;
-    try {
-      projectPath = await extractProjectDirectory(projectName);
-    } catch (error) {
-      console.error('Error extracting project directory:', error);
-      return res.status(500).json({ error: 'Failed to determine project path' });
+    // Resolve the project path through the DB using the caller-supplied
+    // `projectId`. Legacy code here called extractProjectDirectory with a
+    // folder-encoded project name; the migration centralizes that lookup
+    // in the projects table.
+    const projectPath = await projectsDb.getProjectPathById(projectId);
+    if (!projectPath) {
+      return res.status(404).json({ error: 'Project not found' });
     }
 
     // Construct the JSONL file path
@@ -2343,7 +1476,7 @@ async function startServer() {
     console.log('');
 
     // Start watching the projects folder for changes
-    await setupProjectsWatcher();
+    await initializeSessionsWatcher();
 
     // Start server-side plugin processes for enabled plugins
    startEnabledPluginServers().catch(err => {
@@ -2351,6 +1484,7 @@ async function startServer() {
     });
   });
 
   // Clean up plugin processes on shutdown
   const shutdownPlugins = async () => {
+    await closeSessionsWatcher();
     await stopAllPlugins();
diff --git a/server/middleware/auth.js b/server/middleware/auth.js
index 73749792..c40237b2 100644
--- a/server/middleware/auth.js
+++ b/server/middleware/auth.js
@@ -1,5 +1,5 @@
 import jwt from 'jsonwebtoken';
-import { userDb, appConfigDb } from '../database/db.js';
+import { userDb, appConfigDb } from '../modules/database/index.js';
 import { IS_PLATFORM } from '../constants/config.js';
 
 // Use env var if set, otherwise auto-generate a unique secret per installation
diff --git a/server/modules/database/connection.ts b/server/modules/database/connection.ts
new file mode 100644
index 00000000..4453b0e3
--- /dev/null
+++ b/server/modules/database/connection.ts
@@ -0,0 +1,143 @@
+/**
+ * Database connection management.
+ *
+ * Owns the single SQLite connection used across all repositories.
+ * Handles path resolution, directory creation, legacy database migration,
+ * and eager app_config bootstrap so the auth middleware can read the
+ * JWT secret before the full schema is applied.
+ *
+ * Consumers should never create their own Database instance — they use
+ * `getConnection()` to obtain the shared singleton.
+ */
+
+import Database from 'better-sqlite3';
+import fs from 'fs';
+import path from 'path';
+import { fileURLToPath } from 'url';
+
+import { APP_CONFIG_TABLE_SCHEMA_SQL } from '@/modules/database/schema.js';
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
+
+// ---------------------------------------------------------------------------
+// Path resolution
+// ---------------------------------------------------------------------------
+
+/**
+ * Resolves the database file path from environment or falls back
+ * to the legacy location inside the server/database/ folder.
+ *
+ * Priority:
+ * 1. DATABASE_PATH environment variable (set by cli.js or load-env-vars.js)
+ * 2. 
Legacy path: server/database/auth.db
+ */
+function resolveDatabasePath(): string {
+  // process.env.DATABASE_PATH is set by load-env-vars.js to either the .env value or a default (~/.cloudcli/auth.db) in the user's home directory.
+  return process.env.DATABASE_PATH || resolveLegacyDatabasePath();
+}
+
+/**
+ * Resolves the legacy database path (always inside server/database/).
+ * Used for the one-time migration to the new external location.
+ */
+function resolveLegacyDatabasePath(): string {
+  const serverDir = path.resolve(__dirname, '..', '..');
+  return path.join(serverDir, 'database', 'auth.db');
+}
+
+// ---------------------------------------------------------------------------
+// Directory & migration helpers
+// ---------------------------------------------------------------------------
+
+function ensureDatabaseDirectory(dbPath: string): void {
+  const dir = path.dirname(dbPath);
+  if (!fs.existsSync(dir)) {
+    fs.mkdirSync(dir, { recursive: true });
+    console.log('Created database directory:', dir);
+  }
+}
+
+/**
+ * If the database was moved to an external location (e.g. ~/.cloudcli/)
+ * but the user still has a legacy auth.db inside the install directory,
+ * copy it to the new location as a one-time migration.
+ */
+function migrateLegacyDatabase(targetPath: string): void {
+  const legacyPath = resolveLegacyDatabasePath();
+
+  if (targetPath === legacyPath) return;
+  if (fs.existsSync(targetPath)) return;
+  if (!fs.existsSync(legacyPath)) return;
+
+  try {
+    fs.copyFileSync(legacyPath, targetPath);
+    console.log('Migrated legacy database', { from: legacyPath, to: targetPath });
+
+    // Copy the write-ahead log and shared-memory files (auth.db-wal, auth.db-shm)
+    // if they exist, to preserve any uncommitted transactions.
+    for (const suffix of ['-wal', '-shm']) {
+      const src = legacyPath + suffix;
+      if (fs.existsSync(src)) {
+        fs.copyFileSync(src, targetPath + suffix);
+      }
+    }
+  } catch (err: any) {
+    console.error('Could not migrate legacy database', { error: err.message });
+  }
+}
+
+// ---------------------------------------------------------------------------
+// Singleton connection
+// ---------------------------------------------------------------------------
+
+let instance: Database.Database | null = null;
+
+/**
+ * Returns the shared database connection, creating it on first call.
+ *
+ * The first invocation:
+ * 1. Resolves the target database path
+ * 2. Ensures the parent directory exists
+ * 3. Migrates from the legacy install-directory path if needed
+ * 4. Opens the SQLite connection
+ * 5. Eagerly creates the app_config table (auth reads JWT secret at import time)
+ * 6. Logs the database location
+ */
+export function getConnection(): Database.Database {
+  if (instance) return instance;
+
+  const dbPath = resolveDatabasePath();
+
+  ensureDatabaseDirectory(dbPath);
+  migrateLegacyDatabase(dbPath);
+
+  instance = new Database(dbPath);
+
+  // app_config must exist immediately — the auth middleware reads
+  // the JWT secret at module-load time, before initializeDatabase() runs.
+  instance.exec(APP_CONFIG_TABLE_SCHEMA_SQL);
+
+  console.log('Database location:', dbPath);
+  return instance;
+}
+
+/**
+ * Returns the resolved database file path without opening a connection.
+ * Useful for diagnostics and CLI status commands.
+ */
+export function getDatabasePath(): string {
+  return resolveDatabasePath();
+}
+
+/**
+ * Closes the database connection and clears the singleton.
+ * Primarily used for graceful shutdown or testing.
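+ *
+ * A minimal usage sketch (hypothetical path, for illustration only):
+ *
+ * @example
+ * process.env.DATABASE_PATH = '/tmp/example-auth.db';
+ * const db = getConnection(); // opens /tmp/example-auth.db
+ * closeConnection();          // next getConnection() reopens fresh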
+ */ +export function closeConnection(): void { + if (instance) { + instance.close(); + instance = null; + console.log('Database connection closed'); + } +} diff --git a/server/modules/database/index.ts b/server/modules/database/index.ts new file mode 100644 index 00000000..787521ed --- /dev/null +++ b/server/modules/database/index.ts @@ -0,0 +1,12 @@ +export { initializeDatabase } from '@/modules/database/init-db.js'; +export { apiKeysDb } from '@/modules/database/repositories/api-keys.js'; +export { appConfigDb } from '@/modules/database/repositories/app-config.js'; +export { credentialsDb } from '@/modules/database/repositories/credentials.js'; +export { githubTokensDb } from '@/modules/database/repositories/github-tokens.js'; +export { notificationPreferencesDb } from '@/modules/database/repositories/notification-preferences.js'; +export { projectsDb } from '@/modules/database/repositories/projects.db.js'; +export { pushSubscriptionsDb } from '@/modules/database/repositories/push-subscriptions.js'; +export { scanStateDb } from '@/modules/database/repositories/scan-state.db.js'; +export { sessionsDb } from '@/modules/database/repositories/sessions.db.js'; +export { userDb } from '@/modules/database/repositories/users.js'; +export { vapidKeysDb } from '@/modules/database/repositories/vapid-keys.js'; diff --git a/server/modules/database/init-db.ts b/server/modules/database/init-db.ts new file mode 100644 index 00000000..8dfcc4ee --- /dev/null +++ b/server/modules/database/init-db.ts @@ -0,0 +1,17 @@ +import { getConnection } from "@/modules/database/connection.js"; +import { runMigrations } from "@/modules/database/migrations.js"; +import { INIT_SCHEMA_SQL } from "@/modules/database/schema.js"; + +// Initialize database with schema +export const initializeDatabase = async () => { + try { + const db = getConnection(); + db.exec(INIT_SCHEMA_SQL); + console.log('Database schema applied'); + runMigrations(db); + } catch (err) { + const message = err instanceof Error ? 
err.message : String(err); + console.log('Database initialization failed', { error: message }); + throw err; + } +}; diff --git a/server/modules/database/migrations.ts b/server/modules/database/migrations.ts new file mode 100644 index 00000000..390f83ff --- /dev/null +++ b/server/modules/database/migrations.ts @@ -0,0 +1,442 @@ +import { Database } from 'better-sqlite3'; + +import { + APP_CONFIG_TABLE_SCHEMA_SQL, + LAST_SCANNED_AT_SQL, + PROJECTS_TABLE_SCHEMA_SQL, + PUSH_SUBSCRIPTIONS_TABLE_SCHEMA_SQL, + SESSIONS_TABLE_SCHEMA_SQL, + USER_NOTIFICATION_PREFERENCES_TABLE_SCHEMA_SQL, + VAPID_KEYS_TABLE_SCHEMA_SQL, +} from '@/modules/database/schema.js'; + +const SQLITE_UUID_SQL = ` +lower(hex(randomblob(4))) || '-' || +lower(hex(randomblob(2))) || '-' || +lower(hex(randomblob(2))) || '-' || +lower(hex(randomblob(2))) || '-' || +lower(hex(randomblob(6))) +`; + +type TableInfoRow = { + name: string; + pk: number; +}; + +const addColumnToTableIfNotExists = ( + db: Database, + tableName: string, + columnNames: string[], + columnName: string, + columnType: string +) => { + if (!columnNames.includes(columnName)) { + console.log(`Running migration: Adding ${columnName} column to ${tableName} table`); + db.exec(`ALTER TABLE ${tableName} ADD COLUMN ${columnName} ${columnType}`); + } +}; + +const tableExists = (db: Database, tableName: string): boolean => + Boolean( + db + .prepare("SELECT name FROM sqlite_master WHERE type = 'table' AND name = ?") + .get(tableName) + ); + +const getTableInfo = (db: Database, tableName: string): TableInfoRow[] => + db.prepare(`PRAGMA table_info(${tableName})`).all() as TableInfoRow[]; + +const migrateLegacySessionNames = (db: Database): void => { + const hasLegacySessionNamesTable = tableExists(db, 'session_names'); + const hasSessionsTable = tableExists(db, 'sessions'); + + if (!hasLegacySessionNamesTable) { + return; + } + + if (hasSessionsTable) { + console.log('Running migration: Merging session_names into sessions'); + db.exec(` + INSERT INTO sessions (session_id, provider, custom_name, created_at, updated_at) + SELECT + session_id, + COALESCE(provider, 'claude'), + custom_name, + COALESCE(created_at, CURRENT_TIMESTAMP), + COALESCE(updated_at, CURRENT_TIMESTAMP) + FROM session_names + ON CONFLICT(session_id) DO UPDATE SET + provider = excluded.provider, + custom_name = COALESCE(excluded.custom_name, sessions.custom_name), + created_at = COALESCE(sessions.created_at, excluded.created_at), + updated_at = COALESCE(excluded.updated_at, sessions.updated_at) + `); + db.exec('DROP TABLE session_names'); + return; + } + + console.log('Running migration: Renaming session_names table to sessions'); + db.exec('ALTER TABLE session_names RENAME TO sessions'); +}; + +const migrateLegacyWorkspaceTableIntoProjects = (db: Database): void => { + db.exec(PROJECTS_TABLE_SCHEMA_SQL); + + if (!tableExists(db, 'workspace_original_paths')) { + return; + } + + console.log('Running migration: Migrating workspace_original_paths data into projects'); + db.exec(` + INSERT INTO projects (project_id, project_path, custom_project_name, isStarred, isArchived) + SELECT + CASE + WHEN workspace_id IS NULL OR trim(workspace_id) = '' + THEN ${SQLITE_UUID_SQL} + ELSE workspace_id + END, + workspace_path, + custom_workspace_name, + COALESCE(isStarred, 0), + 0 + FROM workspace_original_paths + WHERE workspace_path IS NOT NULL AND trim(workspace_path) <> '' + ON CONFLICT(project_path) DO UPDATE SET + custom_project_name = COALESCE(projects.custom_project_name, excluded.custom_project_name), + isStarred = 
COALESCE(projects.isStarred, excluded.isStarred) + `); +}; + +const rebuildProjectsTableWithPrimaryKeySchema = (db: Database): void => { + const hasProjectsTable = tableExists(db, 'projects'); + if (!hasProjectsTable) { + db.exec(PROJECTS_TABLE_SCHEMA_SQL); + return; + } + + const projectsTableInfo = getTableInfo(db, 'projects'); + const columnNames = projectsTableInfo.map((column) => column.name); + const hasProjectIdPrimaryKey = projectsTableInfo.some( + (column) => column.name === 'project_id' && column.pk === 1, + ); + + if (hasProjectIdPrimaryKey) { + addColumnToTableIfNotExists(db, 'projects', columnNames, 'custom_project_name', 'TEXT DEFAULT NULL'); + addColumnToTableIfNotExists(db, 'projects', columnNames, 'isStarred', 'BOOLEAN DEFAULT 0'); + addColumnToTableIfNotExists(db, 'projects', columnNames, 'isArchived', 'BOOLEAN DEFAULT 0'); + db.exec(` + UPDATE projects + SET project_id = ${SQLITE_UUID_SQL} + WHERE project_id IS NULL OR trim(project_id) = '' + `); + return; + } + + console.log('Running migration: Rebuilding projects table to enforce project_id primary key'); + + const projectPathExpression = columnNames.includes('project_path') + ? 'project_path' + : columnNames.includes('workspace_path') + ? 'workspace_path' + : 'NULL'; + + const customProjectNameExpression = columnNames.includes('custom_project_name') + ? 'custom_project_name' + : columnNames.includes('custom_workspace_name') + ? 'custom_workspace_name' + : 'NULL'; + + const isStarredExpression = columnNames.includes('isStarred') ? 'COALESCE(isStarred, 0)' : '0'; + + const isArchivedExpression = columnNames.includes('isArchived') ? 'COALESCE(isArchived, 0)' : '0'; + + const projectIdExpression = columnNames.includes('project_id') + ? `CASE + WHEN project_id IS NULL OR trim(project_id) = '' + THEN ${SQLITE_UUID_SQL} + ELSE project_id + END` + : SQLITE_UUID_SQL; + + db.exec('PRAGMA foreign_keys = OFF'); + try { + db.exec('BEGIN TRANSACTION'); + db.exec('DROP TABLE IF EXISTS projects__new'); + db.exec(` + CREATE TABLE projects__new ( + project_id TEXT PRIMARY KEY NOT NULL, + project_path TEXT NOT NULL UNIQUE, + custom_project_name TEXT DEFAULT NULL, + isStarred BOOLEAN DEFAULT 0, + isArchived BOOLEAN DEFAULT 0 + ) + `); + db.exec(` + WITH source_rows AS ( + SELECT + ${projectPathExpression} AS project_path, + ${customProjectNameExpression} AS custom_project_name, + ${isStarredExpression} AS isStarred, + ${isArchivedExpression} AS isArchived, + ${projectIdExpression} AS candidate_project_id, + rowid AS source_rowid + FROM projects + WHERE ${projectPathExpression} IS NOT NULL AND trim(${projectPathExpression}) <> '' + ), + deduped_paths AS ( + SELECT + project_path, + custom_project_name, + isStarred, + isArchived, + candidate_project_id, + source_rowid, + ROW_NUMBER() OVER (PARTITION BY project_path ORDER BY source_rowid) AS project_path_rank + FROM source_rows + ), + prepared_rows AS ( + SELECT + CASE + WHEN ROW_NUMBER() OVER (PARTITION BY candidate_project_id ORDER BY source_rowid) = 1 + THEN candidate_project_id + ELSE ${SQLITE_UUID_SQL} + END AS project_id, + project_path, + custom_project_name, + isStarred, + isArchived + FROM deduped_paths + WHERE project_path_rank = 1 + ) + INSERT INTO projects__new ( + project_id, + project_path, + custom_project_name, + isStarred, + isArchived + ) + SELECT + project_id, + project_path, + custom_project_name, + isStarred, + isArchived + FROM prepared_rows + `); + db.exec('DROP TABLE projects'); + db.exec('ALTER TABLE projects__new RENAME TO projects'); + db.exec('COMMIT'); + } catch 
(migrationError) { + db.exec('ROLLBACK'); + throw migrationError; + } finally { + db.exec('PRAGMA foreign_keys = ON'); + } +}; + +const rebuildSessionsTableWithProjectSchema = (db: Database): void => { + const hasSessions = tableExists(db, 'sessions'); + if (!hasSessions) { + db.exec(SESSIONS_TABLE_SCHEMA_SQL); + return; + } + + const sessionsTableInfo = getTableInfo(db, 'sessions'); + const columnNames = sessionsTableInfo.map((column) => column.name); + const primaryKeyColumns = sessionsTableInfo + .filter((column) => column.pk > 0) + .sort((a, b) => a.pk - b.pk) + .map((column) => column.name); + + const shouldRebuild = + !columnNames.includes('project_path') || + primaryKeyColumns.length !== 1 || + primaryKeyColumns[0] !== 'session_id' || + !columnNames.includes('provider'); + + if (!shouldRebuild) { + addColumnToTableIfNotExists(db, 'sessions', columnNames, 'jsonl_path', 'TEXT'); + addColumnToTableIfNotExists(db, 'sessions', columnNames, 'created_at', 'DATETIME'); + addColumnToTableIfNotExists(db, 'sessions', columnNames, 'updated_at', 'DATETIME'); + db.exec('UPDATE sessions SET created_at = COALESCE(created_at, CURRENT_TIMESTAMP)'); + db.exec('UPDATE sessions SET updated_at = COALESCE(updated_at, CURRENT_TIMESTAMP)'); + return; + } + + console.log('Running migration: Rebuilding sessions table to project-based schema'); + + const projectPathExpression = columnNames.includes('project_path') + ? 'project_path' + : columnNames.includes('workspace_path') + ? 'workspace_path' + : 'NULL'; + + const providerExpression = columnNames.includes('provider') + ? "COALESCE(provider, 'claude')" + : "'claude'"; + + const customNameExpression = columnNames.includes('custom_name') + ? 'custom_name' + : 'NULL'; + + const jsonlPathExpression = columnNames.includes('jsonl_path') + ? 'jsonl_path' + : 'NULL'; + + const createdAtExpression = columnNames.includes('created_at') + ? 'COALESCE(created_at, CURRENT_TIMESTAMP)' + : 'CURRENT_TIMESTAMP'; + + const updatedAtExpression = columnNames.includes('updated_at') + ? 
'COALESCE(updated_at, CURRENT_TIMESTAMP)' + : 'CURRENT_TIMESTAMP'; + + db.exec('PRAGMA foreign_keys = OFF'); + try { + db.exec('BEGIN TRANSACTION'); + db.exec('DROP TABLE IF EXISTS sessions__new'); + db.exec(` + CREATE TABLE sessions__new ( + session_id TEXT NOT NULL, + provider TEXT NOT NULL DEFAULT 'claude', + custom_name TEXT, + project_path TEXT, + jsonl_path TEXT, + created_at DATETIME DEFAULT CURRENT_TIMESTAMP, + updated_at DATETIME DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (session_id), + FOREIGN KEY (project_path) REFERENCES projects(project_path) + ON DELETE SET NULL + ON UPDATE CASCADE + ) + `); + db.exec(` + WITH source_rows AS ( + SELECT + session_id, + ${providerExpression} AS provider, + ${customNameExpression} AS custom_name, + ${projectPathExpression} AS project_path, + ${jsonlPathExpression} AS jsonl_path, + ${createdAtExpression} AS created_at, + ${updatedAtExpression} AS updated_at, + rowid AS source_rowid + FROM sessions + WHERE session_id IS NOT NULL AND trim(session_id) <> '' + ), + ranked_rows AS ( + SELECT + session_id, + provider, + custom_name, + project_path, + jsonl_path, + created_at, + updated_at, + ROW_NUMBER() OVER ( + PARTITION BY session_id + ORDER BY datetime(COALESCE(updated_at, created_at)) DESC, source_rowid DESC + ) AS session_rank + FROM source_rows + ) + INSERT INTO sessions__new ( + session_id, + provider, + custom_name, + project_path, + jsonl_path, + created_at, + updated_at + ) + SELECT + session_id, + provider, + custom_name, + project_path, + jsonl_path, + created_at, + updated_at + FROM ranked_rows + WHERE session_rank = 1 + `); + db.exec('DROP TABLE sessions'); + db.exec('ALTER TABLE sessions__new RENAME TO sessions'); + db.exec('COMMIT'); + } catch (migrationError) { + db.exec('ROLLBACK'); + throw migrationError; + } finally { + db.exec('PRAGMA foreign_keys = ON'); + } +}; + +const ensureProjectsForSessionPaths = (db: Database): void => { + if (!tableExists(db, 'sessions')) { + return; + } + + db.exec(` + INSERT INTO projects (project_id, project_path, custom_project_name, isStarred, isArchived) + SELECT + ${SQLITE_UUID_SQL}, + project_path, + NULL, + 0, + 0 + FROM sessions + WHERE project_path IS NOT NULL AND trim(project_path) <> '' + ON CONFLICT(project_path) DO NOTHING + `); +}; + +export const runMigrations = (db: Database) => { + try { + const usersTableInfo = db.prepare('PRAGMA table_info(users)').all() as { name: string }[]; + const userColumnNames = usersTableInfo.map((column) => column.name); + + addColumnToTableIfNotExists(db, 'users', userColumnNames, 'git_name', 'TEXT'); + addColumnToTableIfNotExists(db, 'users', userColumnNames, 'git_email', 'TEXT'); + addColumnToTableIfNotExists( + db, + 'users', + userColumnNames, + 'has_completed_onboarding', + 'BOOLEAN DEFAULT 0' + ); + + db.exec(APP_CONFIG_TABLE_SCHEMA_SQL); + db.exec(USER_NOTIFICATION_PREFERENCES_TABLE_SCHEMA_SQL); + db.exec(VAPID_KEYS_TABLE_SCHEMA_SQL); + db.exec(PUSH_SUBSCRIPTIONS_TABLE_SCHEMA_SQL); + db.exec('CREATE INDEX IF NOT EXISTS idx_push_subscriptions_user_id ON push_subscriptions(user_id)'); + + db.exec(PROJECTS_TABLE_SCHEMA_SQL); + rebuildProjectsTableWithPrimaryKeySchema(db); + + migrateLegacyWorkspaceTableIntoProjects(db); + rebuildSessionsTableWithProjectSchema(db); + migrateLegacySessionNames(db); + ensureProjectsForSessionPaths(db); + + db.exec('CREATE INDEX IF NOT EXISTS idx_session_ids_lookup ON sessions(session_id)'); + db.exec('CREATE INDEX IF NOT EXISTS idx_sessions_project_path ON sessions(project_path)'); + db.exec('CREATE INDEX IF NOT EXISTS 
idx_projects_is_starred ON projects(isStarred)'); + db.exec('CREATE INDEX IF NOT EXISTS idx_projects_is_archived ON projects(isArchived)'); + + db.exec('DROP INDEX IF EXISTS idx_session_names_lookup'); + db.exec('DROP INDEX IF EXISTS idx_sessions_workspace_path'); + db.exec('DROP INDEX IF EXISTS idx_workspace_original_paths_is_starred'); + db.exec('DROP INDEX IF EXISTS idx_workspace_original_paths_workspace_id'); + + if (tableExists(db, 'workspace_original_paths')) { + console.log('Running migration: Dropping legacy workspace_original_paths table'); + db.exec('DROP TABLE workspace_original_paths'); + } + + db.exec(LAST_SCANNED_AT_SQL); + console.log('Database migrations completed successfully'); + } catch (error: any) { + console.error('Error running migrations:', error.message); + throw error; + } +}; diff --git a/server/modules/database/repositories/api-keys.ts b/server/modules/database/repositories/api-keys.ts new file mode 100644 index 00000000..f2c06f07 --- /dev/null +++ b/server/modules/database/repositories/api-keys.ts @@ -0,0 +1,119 @@ +/** + * API keys repository. + * + * Manages API keys used for external/programmatic access to the backend. + * Keys are prefixed with `ck_` and tied to a user via foreign key. + */ + +import crypto from 'crypto'; + +import { getConnection } from '@/modules/database/connection.js'; + +type ApiKeyRow = { + id: number; + key_name: string; + api_key: string; + created_at: string; + last_used: string | null; + is_active: number; +}; + +type CreateApiKeyResult = { + id: number | bigint; + keyName: string; + apiKey: string; +}; + +type ValidatedApiKeyUser = { + id: number; + username: string; + api_key_id: number; +}; + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +/** Generates a cryptographically random API key with the `ck_` prefix. */ +function generateApiKey(): string { + return 'ck_' + crypto.randomBytes(32).toString('hex'); +} + +// --------------------------------------------------------------------------- +// Queries +// --------------------------------------------------------------------------- + +export const apiKeysDb = { + generateApiKey, + + /** Creates a new API key for the given user and returns it for one-time display. */ + createApiKey(userId: number, keyName: string): CreateApiKeyResult { + const db = getConnection(); + const apiKey = generateApiKey(); + const result = db + .prepare( + 'INSERT INTO api_keys (user_id, key_name, api_key) VALUES (?, ?, ?)' + ) + .run(userId, keyName, apiKey); + return { id: result.lastInsertRowid, keyName, apiKey }; + }, + + /** Lists all API keys for a user, most recent first. */ + getApiKeys(userId: number): ApiKeyRow[] { + const db = getConnection(); + return db + .prepare( + 'SELECT id, key_name, api_key, created_at, last_used, is_active FROM api_keys WHERE user_id = ? ORDER BY created_at DESC' + ) + .all(userId) as ApiKeyRow[]; + }, + + /** + * Validates an API key and resolves the owning user. + * If the key is valid, its `last_used` timestamp is updated as a side effect. + * Returns undefined when the key is invalid or the user is inactive. + */ + validateApiKey(apiKey: string): ValidatedApiKeyUser | undefined { + const db = getConnection(); + const row = db + .prepare( + `SELECT u.id, u.username, ak.id as api_key_id + FROM api_keys ak + JOIN users u ON ak.user_id = u.id + WHERE ak.api_key = ? 
AND ak.is_active = 1 AND u.is_active = 1` + ) + .get(apiKey) as ValidatedApiKeyUser | undefined; + + if (row) { + db.prepare( + 'UPDATE api_keys SET last_used = CURRENT_TIMESTAMP WHERE id = ?' + ).run(row.api_key_id); + } + + return row; + }, + + /** Permanently removes an API key. Returns true if a row was deleted. */ + deleteApiKey(userId: number, apiKeyId: number): boolean { + const db = getConnection(); + const result = db + .prepare('DELETE FROM api_keys WHERE id = ? AND user_id = ?') + .run(apiKeyId, userId); + return result.changes > 0; + }, + + /** Enables or disables an API key without deleting it. */ + toggleApiKey( + userId: number, + apiKeyId: number, + isActive: boolean + ): boolean { + const db = getConnection(); + const result = db + .prepare( + 'UPDATE api_keys SET is_active = ? WHERE id = ? AND user_id = ?' + ) + .run(isActive ? 1 : 0, apiKeyId, userId); + return result.changes > 0; + }, +}; diff --git a/server/modules/database/repositories/app-config.ts b/server/modules/database/repositories/app-config.ts new file mode 100644 index 00000000..691a1b4e --- /dev/null +++ b/server/modules/database/repositories/app-config.ts @@ -0,0 +1,53 @@ +/** + * App config repository. + * + * Key-value store for application-level configuration that persists + * across restarts (JWT secret, feature flags, etc.). Values are always + * stored as strings; callers handle parsing. + */ + +import crypto from 'crypto'; + +import { getConnection } from '@/modules/database/connection.js'; + +// --------------------------------------------------------------------------- +// Queries +// --------------------------------------------------------------------------- + +export const appConfigDb = { + /** Returns the stored value for a config key, or null if missing. */ + get(key: string): string | null { + try { + const db = getConnection(); + const row = db + .prepare('SELECT value FROM app_config WHERE key = ?') + .get(key) as { value: string } | undefined; + return row?.value ?? null; + } catch { + // Swallow errors so early-startup reads (e.g. JWT secret) do not crash. + return null; + } + }, + + /** Inserts or updates a config key (upsert). */ + set(key: string, value: string): void { + const db = getConnection(); + db.prepare( + 'INSERT INTO app_config (key, value) VALUES (?, ?) ON CONFLICT(key) DO UPDATE SET value = excluded.value' + ).run(key, value); + }, + + /** + * Returns the JWT signing secret, generating and persisting one + * if it does not already exist. This ensures the secret survives + * server restarts while being created automatically on first boot. + */ + getOrCreateJwtSecret(): string { + let secret = appConfigDb.get('jwt_secret'); + if (!secret) { + secret = crypto.randomBytes(64).toString('hex'); + appConfigDb.set('jwt_secret', secret); + } + return secret; + }, +}; diff --git a/server/modules/database/repositories/credentials.ts b/server/modules/database/repositories/credentials.ts new file mode 100644 index 00000000..8bb11107 --- /dev/null +++ b/server/modules/database/repositories/credentials.ts @@ -0,0 +1,106 @@ +/** + * User credentials repository. + * + * Manages external service tokens (GitHub, GitLab, Bitbucket, etc.) + * stored per-user. Each credential has a type discriminator so multiple + * credential kinds can coexist in the same table. 
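+ *
+ * A usage sketch (hypothetical user id and token values):
+ *
+ * @example
+ * credentialsDb.createCredential(1, 'ci-bot', 'gitlab_token', 'glpat-<redacted>');
+ * credentialsDb.getActiveCredential(1, 'gitlab_token'); // → 'glpat-<redacted>'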
+ */ + +import { getConnection } from '@/modules/database/connection.js'; +import type { + CreateCredentialResult, + CredentialPublicRow, +} from '@/shared/types.js'; + +// --------------------------------------------------------------------------- +// Queries +// --------------------------------------------------------------------------- + +export const credentialsDb = { + /** Stores a new credential and returns a safe (no raw value) result. */ + createCredential( + userId: number, + credentialName: string, + credentialType: string, + credentialValue: string, + description: string | null = null + ): CreateCredentialResult { + const db = getConnection(); + const result = db + .prepare( + 'INSERT INTO user_credentials (user_id, credential_name, credential_type, credential_value, description) VALUES (?, ?, ?, ?, ?)' + ) + .run(userId, credentialName, credentialType, credentialValue, description); + return { + id: result.lastInsertRowid, + credentialName, + credentialType, + }; + }, + + /** + * Lists credentials for a user (excluding raw values). + * Optionally filters by credential type (e.g. 'github_token'). + */ + getCredentials( + userId: number, + credentialType: string | null = null + ): CredentialPublicRow[] { + const db = getConnection(); + + if (credentialType) { + return db + .prepare( + 'SELECT id, credential_name, credential_type, description, created_at, is_active FROM user_credentials WHERE user_id = ? AND credential_type = ? ORDER BY created_at DESC' + ) + .all(userId, credentialType) as CredentialPublicRow[]; + } + + return db + .prepare( + 'SELECT id, credential_name, credential_type, description, created_at, is_active FROM user_credentials WHERE user_id = ? ORDER BY created_at DESC' + ) + .all(userId) as CredentialPublicRow[]; + }, + + /** + * Returns the raw credential value for the most recent active + * credential of the given type, or null if none exists. + */ + getActiveCredential( + userId: number, + credentialType: string + ): string | null { + const db = getConnection(); + const row = db + .prepare( + 'SELECT credential_value FROM user_credentials WHERE user_id = ? AND credential_type = ? AND is_active = 1 ORDER BY created_at DESC LIMIT 1' + ) + .get(userId, credentialType) as { credential_value: string } | undefined; + return row?.credential_value ?? null; + }, + + /** Permanently removes a credential. Returns true if a row was deleted. */ + deleteCredential(userId: number, credentialId: number): boolean { + const db = getConnection(); + const result = db + .prepare('DELETE FROM user_credentials WHERE id = ? AND user_id = ?') + .run(credentialId, userId); + return result.changes > 0; + }, + + /** Enables or disables a credential without deleting it. */ + toggleCredential( + userId: number, + credentialId: number, + isActive: boolean + ): boolean { + const db = getConnection(); + const result = db + .prepare( + 'UPDATE user_credentials SET is_active = ? WHERE id = ? AND user_id = ?' + ) + .run(isActive ? 1 : 0, credentialId, userId); + return result.changes > 0; + }, +}; diff --git a/server/modules/database/repositories/github-tokens.ts b/server/modules/database/repositories/github-tokens.ts new file mode 100644 index 00000000..475f515b --- /dev/null +++ b/server/modules/database/repositories/github-tokens.ts @@ -0,0 +1,100 @@ +/** + * GitHub tokens repository. + * + * Backward-compatible helper layer over generic credentials storage. + * Tokens are stored in `user_credentials` with `credential_type = 'github_token'`. 
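+ *
+ * A usage sketch (hypothetical values), delegating to credentialsDb under the hood:
+ *
+ * @example
+ * githubTokensDb.createGithubToken(1, 'personal', 'ghp_<redacted>');
+ * githubTokensDb.getActiveGithubToken(1); // → 'ghp_<redacted>'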
+ */ + +import { getConnection } from '@/modules/database/connection.js'; +import { credentialsDb } from '@/modules/database/repositories/credentials.js'; +import type { + CredentialPublicRow, + CreateCredentialResult, +} from '@/shared/types.js'; + +const GITHUB_TOKEN_TYPE = 'github_token'; + +type CredentialRow = { + id: number; + user_id: number; + credential_name: string; + credential_type: string; + credential_value: string; + description: string | null; + created_at: string; + is_active: number; +}; + +type GithubTokenLookup = CredentialRow & { + github_token: string; +}; + +export const githubTokensDb = { + /** Creates a GitHub token credential entry. */ + createGithubToken( + userId: number, + tokenName: string, + githubToken: string, + description: string | null = null + ): CreateCredentialResult { + return credentialsDb.createCredential( + userId, + tokenName, + GITHUB_TOKEN_TYPE, + githubToken, + description + ); + }, + + /** Returns all GitHub tokens (safe shape: no credential value). */ + getGithubTokens(userId: number): CredentialPublicRow[] { + return credentialsDb.getCredentials(userId, GITHUB_TOKEN_TYPE); + }, + + /** Returns the most recent active GitHub token value for a user. */ + getActiveGithubToken(userId: number): string | null { + return credentialsDb.getActiveCredential(userId, GITHUB_TOKEN_TYPE); + }, + + /** + * Returns a specific active GitHub token row by id/user, including + * a `github_token` compatibility field. + */ + getGithubTokenById(userId: number, tokenId: number): GithubTokenLookup | null { + const db = getConnection(); + const row = db + .prepare( + `SELECT * + FROM user_credentials + WHERE id = ? AND user_id = ? AND credential_type = ? AND is_active = 1` + ) + .get(tokenId, userId, GITHUB_TOKEN_TYPE) as CredentialRow | undefined; + + if (!row) return null; + + return { + ...row, + github_token: row.credential_value, + }; + }, + + /** Updates active state for a GitHub token. */ + updateGithubToken( + userId: number, + tokenId: number, + isActive: boolean + ): boolean { + return credentialsDb.toggleCredential(userId, tokenId, isActive); + }, + + /** Deletes a GitHub token. */ + deleteGithubToken(userId: number, tokenId: number): boolean { + return credentialsDb.deleteCredential(userId, tokenId); + }, + + // Legacy alias used by existing routes + toggleGithubToken(userId: number, tokenId: number, isActive: boolean): boolean { + return githubTokensDb.updateGithubToken(userId, tokenId, isActive); + }, +}; + diff --git a/server/modules/database/repositories/notification-preferences.ts b/server/modules/database/repositories/notification-preferences.ts new file mode 100644 index 00000000..6ba21976 --- /dev/null +++ b/server/modules/database/repositories/notification-preferences.ts @@ -0,0 +1,103 @@ +/** + * Notification preferences repository. + * + * Stores per-user notification channel/event preferences as JSON. + */ + +import { getConnection } from '@/modules/database/connection.js'; + +type NotificationPreferences = { + channels: { + inApp: boolean; + webPush: boolean; + }; + events: { + actionRequired: boolean; + stop: boolean; + error: boolean; + }; +}; + +const DEFAULT_NOTIFICATION_PREFERENCES: NotificationPreferences = { + channels: { + inApp: false, + webPush: false, + }, + events: { + actionRequired: true, + stop: true, + error: true, + }, +}; + +function normalizeNotificationPreferences(value: unknown): NotificationPreferences { + const source = value && typeof value === 'object' ? 
(value as Record<string, any>) : {};
+
+  return {
+    channels: {
+      inApp: source.channels?.inApp === true,
+      webPush: source.channels?.webPush === true,
+    },
+    events: {
+      actionRequired: source.events?.actionRequired !== false,
+      stop: source.events?.stop !== false,
+      error: source.events?.error !== false,
+    },
+  };
+}
+
+export const notificationPreferencesDb = {
+  /** Returns the normalized preferences for a user, creating defaults on first read. */
+  getNotificationPreferences(userId: number): NotificationPreferences {
+    const db = getConnection();
+    const row = db
+      .prepare(
+        'SELECT preferences_json FROM user_notification_preferences WHERE user_id = ?'
+      )
+      .get(userId) as { preferences_json: string } | undefined;
+
+    if (!row) {
+      const defaults = normalizeNotificationPreferences(DEFAULT_NOTIFICATION_PREFERENCES);
+      db.prepare(
+        'INSERT INTO user_notification_preferences (user_id, preferences_json, updated_at) VALUES (?, ?, CURRENT_TIMESTAMP)'
+      ).run(userId, JSON.stringify(defaults));
+      return defaults;
+    }
+
+    let parsed: unknown;
+    try {
+      parsed = JSON.parse(row.preferences_json);
+    } catch {
+      parsed = DEFAULT_NOTIFICATION_PREFERENCES;
+    }
+    return normalizeNotificationPreferences(parsed);
+  },
+
+  /** Upserts normalized preferences for a user and returns the stored value. */
+  updateNotificationPreferences(
+    userId: number,
+    preferences: unknown
+  ): NotificationPreferences {
+    const normalized = normalizeNotificationPreferences(preferences);
+    const db = getConnection();
+
+    db.prepare(
+      `INSERT INTO user_notification_preferences (user_id, preferences_json, updated_at)
+       VALUES (?, ?, CURRENT_TIMESTAMP)
+       ON CONFLICT(user_id) DO UPDATE SET
+         preferences_json = excluded.preferences_json,
+         updated_at = CURRENT_TIMESTAMP`
+    ).run(userId, JSON.stringify(normalized));
+
+    return normalized;
+  },
+
+  // Legacy aliases used by existing services/routes
+  getPreferences(userId: number): NotificationPreferences {
+    return notificationPreferencesDb.getNotificationPreferences(userId);
+  },
+  updatePreferences(userId: number, preferences: unknown): NotificationPreferences {
+    return notificationPreferencesDb.updateNotificationPreferences(userId, preferences);
+  },
+};
+
diff --git a/server/modules/database/repositories/projects.db.integration.test.ts b/server/modules/database/repositories/projects.db.integration.test.ts
new file mode 100644
index 00000000..6ccf0c21
--- /dev/null
+++ b/server/modules/database/repositories/projects.db.integration.test.ts
@@ -0,0 +1,72 @@
+import assert from 'node:assert/strict';
+import { mkdtemp, rm } from 'node:fs/promises';
+import { tmpdir } from 'node:os';
+import path from 'node:path';
+import test from 'node:test';
+
+import { closeConnection } from '@/modules/database/connection.js';
+import { initializeDatabase } from '@/modules/database/init-db.js';
+import { projectsDb } from '@/modules/database/repositories/projects.db.js';
+
+async function withIsolatedDatabase(runTest: () => void | Promise<void>): Promise<void> {
+  const previousDatabasePath = process.env.DATABASE_PATH;
+  const tempDirectory = await mkdtemp(path.join(tmpdir(), 'projects-db-'));
+  const databasePath = path.join(tempDirectory, 'auth.db');
+
+  closeConnection();
+  process.env.DATABASE_PATH = databasePath;
+  await initializeDatabase();
+
+  try {
+    await runTest();
+  } finally {
+    closeConnection();
+    if (previousDatabasePath === undefined) {
+      delete process.env.DATABASE_PATH;
+    } else {
+      process.env.DATABASE_PATH = previousDatabasePath;
+    }
+    await rm(tempDirectory, { recursive: true, force: true
}); + } +} + +test('projectsDb.createProjectPath returns created for fresh paths', async () => { + await withIsolatedDatabase(() => { + const created = projectsDb.createProjectPath('/workspace/new-project'); + + assert.equal(created.outcome, 'created'); + assert.ok(created.project); + assert.equal(created.project?.project_path, '/workspace/new-project'); + assert.equal(created.project?.isArchived, 0); + }); +}); + +test('projectsDb.createProjectPath returns reactivated_archived for archived duplicates', async () => { + await withIsolatedDatabase(() => { + const initial = projectsDb.createProjectPath('/workspace/archived-project', 'Archived Project'); + assert.equal(initial.outcome, 'created'); + assert.ok(initial.project); + + projectsDb.updateProjectIsArchived('/workspace/archived-project', true); + + const reused = projectsDb.createProjectPath('/workspace/archived-project', 'Renamed Project'); + assert.equal(reused.outcome, 'reactivated_archived'); + assert.ok(reused.project); + assert.equal(reused.project?.project_id, initial.project?.project_id); + assert.equal(reused.project?.isArchived, 0); + }); +}); + +test('projectsDb.createProjectPath returns active_conflict for active duplicates', async () => { + await withIsolatedDatabase(() => { + const initial = projectsDb.createProjectPath('/workspace/active-project'); + assert.equal(initial.outcome, 'created'); + assert.ok(initial.project); + + const conflict = projectsDb.createProjectPath('/workspace/active-project'); + assert.equal(conflict.outcome, 'active_conflict'); + assert.ok(conflict.project); + assert.equal(conflict.project?.project_id, initial.project?.project_id); + assert.equal(conflict.project?.isArchived, 0); + }); +}); diff --git a/server/modules/database/repositories/projects.db.ts b/server/modules/database/repositories/projects.db.ts new file mode 100644 index 00000000..c99b8a54 --- /dev/null +++ b/server/modules/database/repositories/projects.db.ts @@ -0,0 +1,183 @@ +import { randomUUID } from 'node:crypto'; +import path from 'node:path'; + +import { getConnection } from '@/modules/database/connection.js'; +import type { CreateProjectPathResult, ProjectRepositoryRow } from '@/shared/types.js'; +import { normalizeProjectPath } from '@/shared/utils.js'; + +function normalizeProjectDisplayName(projectPath: string, customProjectName: string | null): string { + const trimmedCustomName = typeof customProjectName === 'string' ? customProjectName.trim() : ''; + if (trimmedCustomName.length > 0) { + return trimmedCustomName; + } + + const directoryName = path.basename(projectPath); + return directoryName || projectPath; +} + +export const projectsDb = { + createProjectPath(projectPath: string, customProjectName: string | null = null): CreateProjectPathResult { + const db = getConnection(); + const normalizedProjectPath = normalizeProjectPath(projectPath); + const normalizedProjectName = normalizeProjectDisplayName(normalizedProjectPath, customProjectName); + const attemptedId = randomUUID(); + const row = db.prepare(` + INSERT INTO projects (project_id, project_path, custom_project_name, isArchived) + VALUES (?, ?, ?, 0) + ON CONFLICT(project_path) DO UPDATE SET + isArchived = 0 + WHERE projects.isArchived = 1 + RETURNING project_id, project_path, custom_project_name, isStarred, isArchived + `).get(attemptedId, normalizedProjectPath, normalizedProjectName) as ProjectRepositoryRow | undefined; + + if (row) { + return { + outcome: row.project_id === attemptedId ? 
+    const existingProject = projectsDb.getProjectPath(normalizedProjectPath);
+    return {
+      outcome: 'active_conflict',
+      project: existingProject,
+    };
+  },
+
+  getProjectPath(projectPath: string): ProjectRepositoryRow | null {
+    const db = getConnection();
+    const normalizedProjectPath = normalizeProjectPath(projectPath);
+    const row = db.prepare(`
+      SELECT project_id, project_path, custom_project_name, isStarred, isArchived
+      FROM projects
+      WHERE project_path = ?
+    `).get(normalizedProjectPath) as ProjectRepositoryRow | undefined;
+
+    return row ?? null;
+  },
+
+  getProjectById(projectId: string): ProjectRepositoryRow | null {
+    const db = getConnection();
+    const row = db.prepare(`
+      SELECT project_id, project_path, custom_project_name, isStarred, isArchived
+      FROM projects
+      WHERE project_id = ?
+    `).get(projectId) as ProjectRepositoryRow | undefined;
+
+    return row ?? null;
+  },
+
+  /**
+   * Resolve the absolute project directory from a database project_id.
+   *
+   * This is the canonical lookup used after the projectName → projectId migration:
+   * API routes receive the DB-assigned `projectId` and must resolve the real folder
+   * path through this helper before touching the filesystem. Returns `null` when the
+   * project row does not exist so callers can respond with a 404.
+   */
+  getProjectPathById(projectId: string): string | null {
+    const db = getConnection();
+    const row = db.prepare(`
+      SELECT project_path
+      FROM projects
+      WHERE project_id = ?
+    `).get(projectId) as Pick<ProjectRepositoryRow, 'project_path'> | undefined;
+
+    return row?.project_path ?? null;
+  },
+
+  getProjectPaths(): ProjectRepositoryRow[] {
+    const db = getConnection();
+    return db.prepare(`
+      SELECT project_id, project_path, custom_project_name, isStarred, isArchived
+      FROM projects
+      WHERE isArchived = 0
+    `).all() as ProjectRepositoryRow[];
+  },
+
+  getCustomProjectName(projectPath: string): string | null {
+    const db = getConnection();
+    const normalizedProjectPath = normalizeProjectPath(projectPath);
+    const row = db.prepare(`
+      SELECT custom_project_name
+      FROM projects
+      WHERE project_path = ?
+    `).get(normalizedProjectPath) as Pick<ProjectRepositoryRow, 'custom_project_name'> | undefined;
+
+    return row?.custom_project_name ?? null;
+  },
+
+  updateCustomProjectName(projectPath: string, customProjectName: string | null): void {
+    const db = getConnection();
+    const normalizedProjectPath = normalizeProjectPath(projectPath);
+    db.prepare(`
+      INSERT INTO projects (project_id, project_path, custom_project_name)
+      VALUES (?, ?, ?)
+      ON CONFLICT(project_path) DO UPDATE SET custom_project_name = excluded.custom_project_name
+    `).run(randomUUID(), normalizedProjectPath, customProjectName);
+  },
+
+  updateCustomProjectNameById(projectId: string, customProjectName: string | null): void {
+    const db = getConnection();
+    db.prepare(`
+      UPDATE projects
+      SET custom_project_name = ?
+      WHERE project_id = ?
+    `).run(customProjectName, projectId);
+  },
+
+  updateProjectIsStarred(projectPath: string, isStarred: boolean): void {
+    const db = getConnection();
+    const normalizedProjectPath = normalizeProjectPath(projectPath);
+    db.prepare(`
+      UPDATE projects
+      SET isStarred = ?
+      WHERE project_path = ?
+    `).run(isStarred ? 1 : 0, normalizedProjectPath);
+  },
+
+  updateProjectIsStarredById(projectId: string, isStarred: boolean): void {
+    const db = getConnection();
+    db.prepare(`
+      UPDATE projects
+      SET isStarred = ?
+      WHERE project_id = ?
+    `).run(isStarred ? 1 : 0, projectId);
+  },
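+
+  // SQLite has no native boolean type; star/archive flags are persisted as 0/1 integers.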
+
+  updateProjectIsArchived(projectPath: string, isArchived: boolean): void {
+    const db = getConnection();
+    const normalizedProjectPath = normalizeProjectPath(projectPath);
+    db.prepare(`
+      UPDATE projects
+      SET isArchived = ?
+      WHERE project_path = ?
+    `).run(isArchived ? 1 : 0, normalizedProjectPath);
+  },
+
+  updateProjectIsArchivedById(projectId: string, isArchived: boolean): void {
+    const db = getConnection();
+    db.prepare(`
+      UPDATE projects
+      SET isArchived = ?
+      WHERE project_id = ?
+    `).run(isArchived ? 1 : 0, projectId);
+  },
+
+  deleteProjectPath(projectPath: string): void {
+    const db = getConnection();
+    const normalizedProjectPath = normalizeProjectPath(projectPath);
+    db.prepare(`
+      DELETE FROM projects
+      WHERE project_path = ?
+    `).run(normalizedProjectPath);
+  },
+
+  deleteProjectById(projectId: string): void {
+    const db = getConnection();
+    db.prepare(`
+      DELETE FROM projects
+      WHERE project_id = ?
+    `).run(projectId);
+  },
+};
diff --git a/server/modules/database/repositories/push-subscriptions.ts b/server/modules/database/repositories/push-subscriptions.ts
new file mode 100644
index 00000000..91ef044c
--- /dev/null
+++ b/server/modules/database/repositories/push-subscriptions.ts
@@ -0,0 +1,80 @@
+/**
+ * Push subscriptions repository.
+ *
+ * Persists browser push subscription endpoints and keys per user.
+ */
+
+import { getConnection } from '@/modules/database/connection.js';
+
+type PushSubscriptionLookupRow = {
+  endpoint: string;
+  keys_p256dh: string;
+  keys_auth: string;
+};
+
+export const pushSubscriptionsDb = {
+  /** Upserts a push subscription endpoint for a user. */
+  createPushSubscription(
+    userId: number,
+    endpoint: string,
+    keysP256dh: string,
+    keysAuth: string
+  ): void {
+    const db = getConnection();
+    db.prepare(
+      `INSERT INTO push_subscriptions (user_id, endpoint, keys_p256dh, keys_auth)
+       VALUES (?, ?, ?, ?)
+       ON CONFLICT(endpoint) DO UPDATE SET
+         user_id = excluded.user_id,
+         keys_p256dh = excluded.keys_p256dh,
+         keys_auth = excluded.keys_auth`
+    ).run(userId, endpoint, keysP256dh, keysAuth);
+  },
+
+  /** Returns all subscriptions for a user. */
+  getPushSubscriptions(userId: number): PushSubscriptionLookupRow[] {
+    const db = getConnection();
+    return db
+      .prepare(
+        'SELECT endpoint, keys_p256dh, keys_auth FROM push_subscriptions WHERE user_id = ?'
+      )
+      .all(userId) as PushSubscriptionLookupRow[];
+  },
+
+  /** Deletes one subscription by endpoint. */
+  deletePushSubscription(endpoint: string): void {
+    const db = getConnection();
+    db.prepare('DELETE FROM push_subscriptions WHERE endpoint = ?').run(endpoint);
+  },
+
+  /** Deletes all subscriptions for a user. */
+  deletePushSubscriptionsForUser(userId: number): void {
+    const db = getConnection();
+    db.prepare('DELETE FROM push_subscriptions WHERE user_id = ?').run(userId);
+  },
+
+  // Legacy aliases used by existing services/routes
+  saveSubscription(
+    userId: number,
+    endpoint: string,
+    keysP256dh: string,
+    keysAuth: string
+  ): void {
+    pushSubscriptionsDb.createPushSubscription(
+      userId,
+      endpoint,
+      keysP256dh,
+      keysAuth
+    );
+  },
+  getSubscriptions(userId: number): PushSubscriptionLookupRow[] {
+    return pushSubscriptionsDb.getPushSubscriptions(userId);
+  },
+  removeSubscription(endpoint: string): void {
+    pushSubscriptionsDb.deletePushSubscription(endpoint);
+  },
+  removeAllForUser(userId: number): void {
+    pushSubscriptionsDb.deletePushSubscriptionsForUser(userId);
+  },
+};
+
diff --git a/server/modules/database/repositories/scan-state.db.ts b/server/modules/database/repositories/scan-state.db.ts
new file mode 100644
index 00000000..5e93940c
--- /dev/null
+++ b/server/modules/database/repositories/scan-state.db.ts
@@ -0,0 +1,42 @@
+import { getConnection } from '@/modules/database/connection.js';
+
+type ScanStateRow = {
+  last_scanned_at: string;
+};
+
+export const scanStateDb = {
+  getLastScannedAt() {
+    const db = getConnection();
+
+    const row = db
+      .prepare(`SELECT last_scanned_at FROM scan_state WHERE id = 1`)
+      .get() as ScanStateRow | undefined;
+
+    if (!row) {
+      return null; // Before any scan, the row is undefined.
+    }
+
+    let lastScannedDate: Date | null = null;
+    const lastScannedStr = row.last_scanned_at;
+
+    if (lastScannedStr) {
+      // SQLite CURRENT_TIMESTAMP returns UTC in "YYYY-MM-DD HH:MM:SS" format.
+      // Replace space with 'T' and append 'Z' to parse reliably in JS across all platforms.
+      lastScannedDate = new Date(lastScannedStr.replace(' ', 'T') + 'Z');
+    }
+
+    return lastScannedDate;
+  },
+
+  updateLastScannedAt(scannedAt: Date = new Date()) {
+    const db = getConnection();
+    const sqliteTimestamp = scannedAt.toISOString().slice(0, 19).replace('T', ' ');
+
+    db.prepare(`
+      INSERT INTO scan_state (id, last_scanned_at)
+      VALUES (1, ?)
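+      -- id is fixed at 1 (single-row table, enforced by CHECK (id = 1) in the schema)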
+      ON CONFLICT (id)
+      DO UPDATE SET last_scanned_at = excluded.last_scanned_at
+    `).run(sqliteTimestamp);
+  }
+};
diff --git a/server/modules/database/repositories/sessions.db.ts b/server/modules/database/repositories/sessions.db.ts
new file mode 100644
index 00000000..19a96a56
--- /dev/null
+++ b/server/modules/database/repositories/sessions.db.ts
@@ -0,0 +1,174 @@
+import { getConnection } from '@/modules/database/connection.js';
+import { projectsDb } from '@/modules/database/repositories/projects.db.js';
+import { normalizeProjectPath } from '@/shared/utils.js';
+
+type SessionRow = {
+  session_id: string;
+  provider: string;
+  project_path: string | null;
+  jsonl_path: string | null;
+  custom_name: string | null;
+  created_at: string;
+  updated_at: string;
+};
+
+type SessionMetadataLookupRow = Pick<
+  SessionRow,
+  'session_id' | 'provider' | 'project_path' | 'jsonl_path' | 'custom_name' | 'created_at' | 'updated_at'
+>;
+
+function normalizeTimestamp(value?: string): string | null {
+  if (!value) return null;
+
+  const parsed = new Date(value);
+  if (Number.isNaN(parsed.getTime())) {
+    return null;
+  }
+
+  return parsed.toISOString();
+}
+
+function normalizeProjectPathForProvider(provider: string, projectPath: string): string {
+  void provider;
+  return normalizeProjectPath(projectPath);
+}
+
+export const sessionsDb = {
+  createSession(
+    sessionId: string,
+    provider: string,
+    projectPath: string,
+    customName?: string,
+    createdAt?: string,
+    updatedAt?: string,
+    jsonlPath?: string | null
+  ): string {
+    const db = getConnection();
+    const createdAtValue = normalizeTimestamp(createdAt);
+    const updatedAtValue = normalizeTimestamp(updatedAt);
+    const normalizedProjectPath = normalizeProjectPathForProvider(provider, projectPath);
+
+    // First, ensure the project path is recorded in the projects table,
+    // since it's a foreign key in the sessions table.
+    projectsDb.createProjectPath(normalizedProjectPath);
+
+    db.prepare(
+      `INSERT INTO sessions (session_id, provider, custom_name, project_path, jsonl_path, created_at, updated_at)
+       VALUES (?, ?, ?, ?, ?, COALESCE(?, CURRENT_TIMESTAMP), COALESCE(?, CURRENT_TIMESTAMP))
+       ON CONFLICT(session_id) DO UPDATE SET
+         provider = excluded.provider,
+         updated_at = excluded.updated_at,
+         project_path = excluded.project_path,
+         jsonl_path = excluded.jsonl_path,
+         custom_name = COALESCE(excluded.custom_name, sessions.custom_name)`
+    ).run(
+      sessionId,
+      provider,
+      customName ?? null,
+      normalizedProjectPath,
+      jsonlPath ?? null,
+      createdAtValue,
+      updatedAtValue
+    );
+
+    return sessionId;
+  },
+
+  updateSessionCustomName(sessionId: string, customName: string): void {
+    const db = getConnection();
+    db.prepare(
+      `UPDATE sessions
+       SET custom_name = ?
+       WHERE session_id = ?`
+    ).run(customName, sessionId);
+  },
+
+  getSessionById(sessionId: string): SessionMetadataLookupRow | null {
+    const db = getConnection();
+    const row = db
+      .prepare(
+        `SELECT session_id, provider, project_path, jsonl_path, custom_name, created_at, updated_at
+         FROM sessions
+         WHERE session_id = ?
+         ORDER BY updated_at DESC
+         LIMIT 1`
+      )
+      .get(sessionId) as SessionMetadataLookupRow | undefined;
+
+    return row ?? null;
+  },
null; + }, + + getAllSessions(): SessionRow[] { + const db = getConnection(); + return db + .prepare( + `SELECT session_id, provider, project_path, jsonl_path, custom_name, created_at, updated_at + FROM sessions` + ) + .all() as SessionRow[]; + }, + + getSessionsByProjectPath(projectPath: string): SessionRow[] { + const db = getConnection(); + const normalizedProjectPath = normalizeProjectPath(projectPath); + return db + .prepare( + `SELECT session_id, provider, project_path, jsonl_path, custom_name, created_at, updated_at + FROM sessions + WHERE project_path = ?` + ) + .all(normalizedProjectPath) as SessionRow[]; + }, + + getSessionsByProjectPathPage(projectPath: string, limit: number, offset: number): SessionRow[] { + const db = getConnection(); + const normalizedProjectPath = normalizeProjectPath(projectPath); + return db + .prepare( + `SELECT session_id, provider, project_path, jsonl_path, custom_name, created_at, updated_at + FROM sessions + WHERE project_path = ? + ORDER BY datetime(COALESCE(updated_at, created_at)) DESC, session_id DESC + LIMIT ? OFFSET ?` + ) + .all(normalizedProjectPath, limit, offset) as SessionRow[]; + }, + + countSessionsByProjectPath(projectPath: string): number { + const db = getConnection(); + const normalizedProjectPath = normalizeProjectPath(projectPath); + const row = db + .prepare( + `SELECT COUNT(*) AS count + FROM sessions + WHERE project_path = ?` + ) + .get(normalizedProjectPath) as { count: number } | undefined; + + return Number(row?.count ?? 0); + }, + + deleteSessionsByProjectPath(projectPath: string): void { + const db = getConnection(); + const normalizedProjectPath = normalizeProjectPath(projectPath); + db.prepare(`DELETE FROM sessions WHERE project_path = ?`).run(normalizedProjectPath); + }, + + getSessionName(sessionId: string, provider: string): string | null { + const db = getConnection(); + const row = db + .prepare( + `SELECT custom_name + FROM sessions + WHERE session_id = ? AND provider = ?` + ) + .get(sessionId, provider) as { custom_name: string | null } | undefined; + + return row?.custom_name ?? null; + }, + + deleteSessionById(sessionId: string): boolean { + const db = getConnection(); + return db.prepare('DELETE FROM sessions WHERE session_id = ?').run(sessionId).changes > 0; + }, +}; diff --git a/server/modules/database/repositories/users.ts b/server/modules/database/repositories/users.ts new file mode 100644 index 00000000..25bb57bd --- /dev/null +++ b/server/modules/database/repositories/users.ts @@ -0,0 +1,140 @@ +/** + * User repository. + * + * Provides typed CRUD operations for the `users` table. + * This is a single-user system, but the schema supports multiple + * users for forward compatibility. + */ + +import { getConnection } from '@/modules/database/connection.js'; + +type UserRow = { + id: number; + username: string; + password_hash: string; + created_at: string; + last_login: string | null; + is_active: number; + git_name: string | null; + git_email: string | null; + has_completed_onboarding: number; +}; + +type UserPublicRow = Pick; + +type UserGitConfig = { + git_name: string | null; + git_email: string | null; +}; + +type CreateUserResult = { + id: number | bigint; + username: string; +}; + +// --------------------------------------------------------------------------- +// Queries +// --------------------------------------------------------------------------- + +export const userDb = { + /** Returns true if at least one user exists in the database. 
+
+  /** Inserts a new user and returns the created ID + username. */
+  createUser(username: string, passwordHash: string): CreateUserResult {
+    const db = getConnection();
+    const result = db
+      .prepare('INSERT INTO users (username, password_hash) VALUES (?, ?)')
+      .run(username, passwordHash);
+    return { id: result.lastInsertRowid, username };
+  },
+
+  /**
+   * Looks up an active user by username.
+   * Returns the full row (including password hash) for auth verification.
+   */
+  getUserByUsername(username: string): UserRow | undefined {
+    const db = getConnection();
+    return db
+      .prepare('SELECT * FROM users WHERE username = ? AND is_active = 1')
+      .get(username) as UserRow | undefined;
+  },
+
+  /** Updates the last_login timestamp. Non-fatal — logs but does not throw. */
+  updateLastLogin(userId: number): void {
+    try {
+      const db = getConnection();
+      db.prepare(
+        'UPDATE users SET last_login = CURRENT_TIMESTAMP WHERE id = ?'
+      ).run(userId);
+    } catch (err) {
+      const message = err instanceof Error ? err.message : String(err);
+      console.error('Failed to update last login', { error: message });
+    }
+  },
+
+  /** Returns public user fields by ID (no password hash). */
+  getUserById(userId: number): UserPublicRow | undefined {
+    const db = getConnection();
+    return db
+      .prepare(
+        'SELECT id, username, created_at, last_login FROM users WHERE id = ? AND is_active = 1'
+      )
+      .get(userId) as UserPublicRow | undefined;
+  },
+
+  /** Returns the first active user. Used for single-user mode lookups. */
+  getFirstUser(): UserPublicRow | undefined {
+    const db = getConnection();
+    return db
+      .prepare(
+        'SELECT id, username, created_at, last_login FROM users WHERE is_active = 1 LIMIT 1'
+      )
+      .get() as UserPublicRow | undefined;
+  },
+
+  /** Stores the user's preferred git name and email. */
+  updateGitConfig(
+    userId: number,
+    gitName: string,
+    gitEmail: string
+  ): void {
+    const db = getConnection();
+    db.prepare('UPDATE users SET git_name = ?, git_email = ? WHERE id = ?').run(
+      gitName,
+      gitEmail,
+      userId
+    );
+  },
+
+  /** Retrieves the user's git identity (name + email). */
+  getGitConfig(userId: number): UserGitConfig | undefined {
+    const db = getConnection();
+    return db
+      .prepare('SELECT git_name, git_email FROM users WHERE id = ?')
+      .get(userId) as UserGitConfig | undefined;
+  },
+
+  /** Marks onboarding as complete for the given user. */
+  completeOnboarding(userId: number): void {
+    const db = getConnection();
+    db.prepare(
+      'UPDATE users SET has_completed_onboarding = 1 WHERE id = ?'
+    ).run(userId);
+  },
+
+  /** Returns true if the user has finished the onboarding flow. */
+  hasCompletedOnboarding(userId: number): boolean {
+    const db = getConnection();
+    const row = db
+      .prepare('SELECT has_completed_onboarding FROM users WHERE id = ?')
+      .get(userId) as { has_completed_onboarding: number } | undefined;
+    return row?.has_completed_onboarding === 1;
+  },
+};
diff --git a/server/modules/database/repositories/vapid-keys.ts b/server/modules/database/repositories/vapid-keys.ts
new file mode 100644
index 00000000..13faeb34
--- /dev/null
+++ b/server/modules/database/repositories/vapid-keys.ts
@@ -0,0 +1,57 @@
+/**
+ * VAPID keys repository.
+ *
+ * Stores and retrieves the Web Push VAPID key pair.
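+ * Only the most recently inserted pair (highest id) is treated as current.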
+ */
+
+import { getConnection } from '@/modules/database/connection.js';
+
+type VapidKeyRow = {
+  public_key: string;
+  private_key: string;
+};
+
+type VapidKeyPair = {
+  publicKey: string;
+  privateKey: string;
+};
+
+export const vapidKeysDb = {
+  /** Returns the latest stored VAPID key pair, or null when unset. */
+  getVapidKeys(): VapidKeyPair | null {
+    const db = getConnection();
+    const row = db
+      .prepare(
+        'SELECT public_key, private_key FROM vapid_keys ORDER BY id DESC LIMIT 1'
+      )
+      .get() as Pick<VapidKeyRow, 'public_key' | 'private_key'> | undefined;
+
+    if (!row) return null;
+    return {
+      publicKey: row.public_key,
+      privateKey: row.private_key,
+    };
+  },
+
+  /** Persists a new VAPID key pair. */
+  createVapidKeys(publicKey: string, privateKey: string): void {
+    const db = getConnection();
+    db.prepare(
+      'INSERT INTO vapid_keys (public_key, private_key) VALUES (?, ?)'
+    ).run(publicKey, privateKey);
+  },
+
+  /** Replaces all existing keys with a fresh pair. */
+  updateVapidKeys(publicKey: string, privateKey: string): void {
+    const db = getConnection();
+    db.prepare('DELETE FROM vapid_keys').run();
+    vapidKeysDb.createVapidKeys(publicKey, privateKey);
+  },
+
+  /** Deletes all VAPID key rows. */
+  deleteVapidKeys(): void {
+    const db = getConnection();
+    db.prepare('DELETE FROM vapid_keys').run();
+  },
+};
+
diff --git a/server/modules/database/schema.ts b/server/modules/database/schema.ts
new file mode 100644
index 00000000..7af3d80d
--- /dev/null
+++ b/server/modules/database/schema.ts
@@ -0,0 +1,152 @@
+const USER_TABLE_SCHEMA_SQL = `
+CREATE TABLE IF NOT EXISTS users (
+  id INTEGER PRIMARY KEY AUTOINCREMENT,
+  username TEXT UNIQUE NOT NULL,
+  password_hash TEXT NOT NULL,
+  created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
+  last_login DATETIME,
+  is_active BOOLEAN DEFAULT 1,
+  git_name TEXT,
+  git_email TEXT,
+  has_completed_onboarding BOOLEAN DEFAULT 0
+);
+`;
+
+export const API_KEYS_TABLE_SCHEMA_SQL = `
+CREATE TABLE IF NOT EXISTS api_keys (
+  id INTEGER PRIMARY KEY AUTOINCREMENT,
+  user_id INTEGER NOT NULL,
+  key_name TEXT NOT NULL,
+  api_key TEXT UNIQUE NOT NULL,
+  created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
+  last_used DATETIME,
+  is_active BOOLEAN DEFAULT 1,
+  FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
+);
+`;
+
+export const USER_CREDENTIALS_TABLE_SCHEMA_SQL = `
+CREATE TABLE IF NOT EXISTS user_credentials (
+  id INTEGER PRIMARY KEY AUTOINCREMENT,
+  user_id INTEGER NOT NULL,
+  credential_name TEXT NOT NULL,
+  credential_type TEXT NOT NULL, -- 'github_token', 'gitlab_token', 'bitbucket_token', etc.
+  credential_value TEXT NOT NULL,
+  description TEXT,
+  created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
+  is_active BOOLEAN DEFAULT 1,
+  FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
+);
+`;
+
+export const USER_NOTIFICATION_PREFERENCES_TABLE_SCHEMA_SQL = `
+CREATE TABLE IF NOT EXISTS user_notification_preferences (
+  user_id INTEGER PRIMARY KEY,
+  preferences_json TEXT NOT NULL,
+  updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
+  FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
+);
+`;
+
+export const VAPID_KEYS_TABLE_SCHEMA_SQL = `
+CREATE TABLE IF NOT EXISTS vapid_keys (
+  id INTEGER PRIMARY KEY AUTOINCREMENT,
+  public_key TEXT NOT NULL,
+  private_key TEXT NOT NULL,
+  created_at DATETIME DEFAULT CURRENT_TIMESTAMP
+);
+`;
+
+export const PUSH_SUBSCRIPTIONS_TABLE_SCHEMA_SQL = `
+CREATE TABLE IF NOT EXISTS push_subscriptions (
+  id INTEGER PRIMARY KEY AUTOINCREMENT,
+  user_id INTEGER NOT NULL,
+  endpoint TEXT NOT NULL UNIQUE,
+  keys_p256dh TEXT NOT NULL,
+  keys_auth TEXT NOT NULL,
+  created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
+  FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
+);
+`;
+
+export const PROJECTS_TABLE_SCHEMA_SQL = `
+CREATE TABLE IF NOT EXISTS projects (
+  project_id TEXT PRIMARY KEY NOT NULL,
+  project_path TEXT NOT NULL UNIQUE,
+  custom_project_name TEXT DEFAULT NULL,
+  isStarred BOOLEAN DEFAULT 0,
+  isArchived BOOLEAN DEFAULT 0
+);
+`;
+
+export const SESSIONS_TABLE_SCHEMA_SQL = `
+CREATE TABLE IF NOT EXISTS sessions (
+  session_id TEXT NOT NULL,
+  provider TEXT NOT NULL DEFAULT 'claude',
+  custom_name TEXT,
+  project_path TEXT,
+  jsonl_path TEXT,
+  created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
+  updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
+  PRIMARY KEY (session_id),
+  FOREIGN KEY (project_path) REFERENCES projects(project_path)
+    ON DELETE SET NULL
+    ON UPDATE CASCADE
+);
+`;
+
+export const LAST_SCANNED_AT_SQL = `
+CREATE TABLE IF NOT EXISTS scan_state (
+  id INTEGER PRIMARY KEY CHECK (id = 1),
+  last_scanned_at TIMESTAMP NULL
+);
+`;
+
+export const APP_CONFIG_TABLE_SCHEMA_SQL = `
+CREATE TABLE IF NOT EXISTS app_config (
+  key TEXT PRIMARY KEY,
+  value TEXT NOT NULL,
+  created_at DATETIME DEFAULT CURRENT_TIMESTAMP
+);
+`;
+
+export const INIT_SCHEMA_SQL = `
+-- Initialize authentication database
+PRAGMA foreign_keys = ON;
+
+${USER_TABLE_SCHEMA_SQL}
+-- Indexes for user lookup performance
+CREATE INDEX IF NOT EXISTS idx_users_username ON users(username);
+CREATE INDEX IF NOT EXISTS idx_users_active ON users(is_active);
+
+${API_KEYS_TABLE_SCHEMA_SQL}
+CREATE INDEX IF NOT EXISTS idx_api_keys_key ON api_keys(api_key);
+CREATE INDEX IF NOT EXISTS idx_api_keys_user_id ON api_keys(user_id);
+CREATE INDEX IF NOT EXISTS idx_api_keys_active ON api_keys(is_active);
+
+${USER_CREDENTIALS_TABLE_SCHEMA_SQL}
+CREATE INDEX IF NOT EXISTS idx_user_credentials_user_id ON user_credentials(user_id);
+CREATE INDEX IF NOT EXISTS idx_user_credentials_type ON user_credentials(credential_type);
+CREATE INDEX IF NOT EXISTS idx_user_credentials_active ON user_credentials(is_active);
+
+${USER_NOTIFICATION_PREFERENCES_TABLE_SCHEMA_SQL}
+CREATE INDEX IF NOT EXISTS idx_user_notification_preferences_user_id ON user_notification_preferences(user_id);
+
+${VAPID_KEYS_TABLE_SCHEMA_SQL}
+
+${PUSH_SUBSCRIPTIONS_TABLE_SCHEMA_SQL}
+CREATE INDEX IF NOT EXISTS idx_push_subscriptions_user_id ON push_subscriptions(user_id);
+
+${PROJECTS_TABLE_SCHEMA_SQL}
+-- NOTE: These indexes are created in migrations after legacy table-shape repairs.
+-- Creating them here can fail on upgraded installs where projects lacks those columns.
+
+${SESSIONS_TABLE_SCHEMA_SQL}
+CREATE INDEX IF NOT EXISTS idx_session_ids_lookup ON sessions(session_id);
+-- NOTE: This index is created in migrations after sessions is rebuilt to include project_path.
+-- Creating it here can fail on upgraded installs where the legacy sessions table has no project_path.
+
+${LAST_SCANNED_AT_SQL}
+
+${APP_CONFIG_TABLE_SCHEMA_SQL}
+`;
diff --git a/server/modules/projects/index.ts b/server/modules/projects/index.ts
new file mode 100644
index 00000000..2adbf7a5
--- /dev/null
+++ b/server/modules/projects/index.ts
@@ -0,0 +1,6 @@
+export {
+  generateDisplayName,
+  getProjectsWithSessions,
+} from './services/projects-with-sessions-fetch.service.js';
+export { updateProjectDisplayName } from './services/project-management.service.js';
+export { deleteOrArchiveProject, deleteSessionJsonlFilesForProjectPath } from './services/project-delete.service.js';
diff --git a/server/modules/projects/projects.routes.ts b/server/modules/projects/projects.routes.ts
new file mode 100644
index 00000000..a1c94352
--- /dev/null
+++ b/server/modules/projects/projects.routes.ts
@@ -0,0 +1,247 @@
+import express from 'express';
+
+import { createProject, updateProjectDisplayName } from '@/modules/projects/services/project-management.service.js';
+import { startCloneProject } from '@/modules/projects/services/project-clone.service.js';
+import { getProjectTaskMaster } from '@/modules/projects/services/projects-has-taskmaster.service.js';
+import { AppError, asyncHandler } from '@/shared/utils.js';
+import { getProjectSessionsPage, getProjectsWithSessions } from '@/modules/projects/services/projects-with-sessions-fetch.service.js';
+import { deleteOrArchiveProject } from '@/modules/projects/services/project-delete.service.js';
+import { applyLegacyStarredProjectIds, toggleProjectStar } from '@/modules/projects/services/project-star.service.js';
+
+const router = express.Router();
+
+type AuthenticatedUser = {
+  id?: number | string;
+};
+
+function readQueryStringValue(value: unknown): string {
+  if (typeof value === 'string') {
+    return value;
+  }
+
+  if (Array.isArray(value) && typeof value[0] === 'string') {
+    return value[0];
+  }
+
+  return '';
+}
+
+function readOptionalNumericQueryValue(value: unknown): number | null {
+  const rawValue = readQueryStringValue(value).trim();
+  if (!rawValue) {
+    return null;
+  }
+
+  const parsedValue = Number.parseInt(rawValue, 10);
+  return Number.isNaN(parsedValue) ? null : parsedValue;
+}
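+
+// Unlike readOptionalNumericQueryValue, this helper rejects negative or malformed
+// input with a 400 instead of silently returning null.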
+function parseNonNegativeIntQuery(value: unknown, name: string, fallback: number): number {
+  const rawValue = readQueryStringValue(value).trim();
+  if (!rawValue) {
+    return fallback;
+  }
+
+  const parsedValue = Number.parseInt(rawValue, 10);
+  if (Number.isNaN(parsedValue) || parsedValue < 0) {
+    throw new AppError(`${name} must be a non-negative integer`, {
+      code: 'INVALID_QUERY_PARAMETER',
+      statusCode: 400,
+    });
+  }
+
+  return parsedValue;
+}
+
+function resolveRouteErrorMessage(error: unknown): string {
+  if (error instanceof AppError) {
+    return error.message;
+  }
+
+  if (error instanceof Error && error.message) {
+    return error.message;
+  }
+
+  return 'Failed to clone repository';
+}
+
+router.get(
+  '/',
+  asyncHandler(async (_req, res) => {
+    const projects = await getProjectsWithSessions();
+    res.json(projects);
+  }),
+);
+
+router.get(
+  '/:projectId/sessions',
+  asyncHandler(async (req, res) => {
+    const projectId = typeof req.params.projectId === 'string' ? req.params.projectId : '';
+    const limit = parseNonNegativeIntQuery(req.query.limit, 'limit', 20);
+    const offset = parseNonNegativeIntQuery(req.query.offset, 'offset', 0);
+    const sessionsPage = await getProjectSessionsPage(projectId, { limit, offset });
+    res.json(sessionsPage);
+  }),
+);
+
+router.post(
+  '/create-project',
+  asyncHandler(async (req, res) => {
+    const requestBody = req.body as Record<string, unknown>;
+    const projectPath = typeof requestBody.path === 'string' ? requestBody.path : '';
+    const customName = typeof requestBody.customName === 'string' ? requestBody.customName : null;
+
+    if (requestBody.workspaceType !== undefined) {
+      throw new AppError('workspaceType is no longer supported. Use the single create-project flow.', {
+        code: 'LEGACY_WORKSPACE_TYPE_UNSUPPORTED',
+        statusCode: 400,
+      });
+    }
+
+    if (requestBody.githubUrl || requestBody.githubTokenId || requestBody.newGithubToken) {
+      throw new AppError('Repository cloning is not supported on create-project', {
+        code: 'CLONE_NOT_SUPPORTED_ON_CREATE_PROJECT',
+        statusCode: 400,
+        details: 'Use /api/projects/clone-progress for cloning workflows',
+      });
+    }
+
+    const projectCreationResult = await createProject({
+      projectPath,
+      customName,
+    });
+
+    res.json({
+      success: true,
+      project: projectCreationResult.project,
+      message:
+        projectCreationResult.outcome === 'reactivated_archived'
+          ? 'Archived project path reused successfully'
+          : 'Project created successfully',
+    });
+  }),
+);
+
+/**
+ * One-time (or idempotent) migration: apply legacy `localStorage` starred projectIds to the DB, then clear client storage.
+ */
+router.post(
+  '/migrate-legacy-stars',
+  asyncHandler(async (req, res) => {
+    const projectIds = Array.isArray((req.body as { projectIds?: unknown })?.projectIds)
+      ? ((req.body as { projectIds: unknown[] }).projectIds as unknown[]).map((x) => String(x))
+      : [];
+    const { updated } = applyLegacyStarredProjectIds(projectIds);
+    res.json({ success: true, updated });
+  }),
+);
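+
+// Server-Sent Events endpoint: clone parameters travel in the query string because
+// EventSource-style requests are plain GETs and cannot carry a JSON body.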
+router.get('/clone-progress', async (req, res) => {
+  res.setHeader('Content-Type', 'text/event-stream');
+  res.setHeader('Cache-Control', 'no-cache');
+  res.setHeader('Connection', 'keep-alive');
+  res.flushHeaders();
+
+  const sendEvent = (type: string, data: Record<string, unknown>) => {
+    if (res.writableEnded) {
+      return;
+    }
+
+    res.write(`data: ${JSON.stringify({ type, ...data })}\n\n`);
+  };
+
+  let cloneOperation: Awaited<ReturnType<typeof startCloneProject>> | null = null;
+  const closeListener = () => {
+    cloneOperation?.cancel();
+  };
+  req.on('close', closeListener);
+
+  try {
+    const queryParams = req.query as Record<string, unknown>;
+    const workspacePath = readQueryStringValue(queryParams.path);
+    const githubUrl = readQueryStringValue(queryParams.githubUrl);
+    const githubTokenId = readOptionalNumericQueryValue(queryParams.githubTokenId);
+    const newGithubToken = readQueryStringValue(queryParams.newGithubToken) || null;
+
+    const authenticatedUser = (req as typeof req & { user?: AuthenticatedUser }).user;
+    const userId = authenticatedUser?.id;
+    if (userId === undefined || userId === null) {
+      throw new AppError('Authenticated user is required', {
+        code: 'AUTHENTICATION_REQUIRED',
+        statusCode: 401,
+      });
+    }
+
+    cloneOperation = await startCloneProject(
+      {
+        workspacePath,
+        githubUrl,
+        githubTokenId,
+        newGithubToken,
+        userId,
+      },
+      {
+        onProgress: (message) => {
+          sendEvent('progress', { message });
+        },
+        onComplete: ({ project, message }) => {
+          sendEvent('complete', { project, message });
+        },
+      },
+    );
+
+    await cloneOperation.waitForCompletion;
+  } catch (error) {
+    sendEvent('error', { message: resolveRouteErrorMessage(error) });
+  } finally {
+    req.off('close', closeListener);
+    if (!res.writableEnded) {
+      res.end();
+    }
+  }
+});
+
+router.get(
+  '/:projectId/taskmaster',
+  asyncHandler(async (req, res) => {
+    const projectId = typeof req.params.projectId === 'string' ? req.params.projectId : '';
+    const taskMasterDetails = await getProjectTaskMaster(projectId);
+    res.json(taskMasterDetails);
+  }),
+);
+
+router.put('/:projectId/rename', (req, res) => {
+  try {
+    const projectId = typeof req.params.projectId === 'string' ? req.params.projectId : '';
+    const { displayName } = req.body as { displayName?: unknown };
+    updateProjectDisplayName(projectId, displayName);
+    res.json({ success: true });
+  } catch (error) {
+    res.status(500).json({ error: error instanceof Error ? error.message : 'Failed to rename project' });
+  }
+});
+
+router.post(
+  '/:projectId/toggle-star',
+  asyncHandler(async (req, res) => {
+    const projectId = typeof req.params.projectId === 'string' ? req.params.projectId : '';
+    const { isStarred } = toggleProjectStar(projectId);
+    res.json({ success: true, isStarred });
+  }),
+);
+
+/**
+ * - `force` not set / false: archive project in DB only (`isArchived` = 1; hidden from active list).
+ * - `force=true`: remove the DB row, delete its session rows, and delete each session's recorded `*.jsonl` transcript file.
+ */
+router.delete(
+  '/:projectId',
+  asyncHandler(async (req, res) => {
+    const projectId = typeof req.params.projectId === 'string' ? req.params.projectId : '';
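+    // An empty or missing id falls through to the PROJECT_NOT_FOUND (404) check
+    // inside deleteOrArchiveProject.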
+    const force = req.query.force === 'true';
+    await deleteOrArchiveProject(projectId, force);
+    res.json({ success: true });
+  }),
+);
+
+export default router;
diff --git a/server/modules/projects/services/project-clone.service.ts b/server/modules/projects/services/project-clone.service.ts
new file mode 100644
index 00000000..1a91b879
--- /dev/null
+++ b/server/modules/projects/services/project-clone.service.ts
@@ -0,0 +1,321 @@
+import { spawn } from 'node:child_process';
+import { access, mkdir, rm } from 'node:fs/promises';
+import path from 'node:path';
+
+import { githubTokensDb } from '@/modules/database/index.js';
+import { createProject } from '@/modules/projects/services/project-management.service.js';
+import type { WorkspacePathValidationResult } from '@/shared/types.js';
+import { AppError, validateWorkspacePath } from '@/shared/utils.js';
+
+type CloneProjectInput = {
+  workspacePath: string;
+  githubUrl: string;
+  githubTokenId?: number | null;
+  newGithubToken?: string | null;
+  userId: number | string;
+};
+
+type CloneCompletePayload = {
+  project: Record<string, unknown>;
+  message: string;
+};
+
+type CloneProjectEventHandlers = {
+  onProgress: (message: string) => void;
+  onComplete: (payload: CloneCompletePayload) => void;
+};
+
+type GitCloneProcess = {
+  stdout: NodeJS.ReadableStream | null;
+  stderr: NodeJS.ReadableStream | null;
+  on(event: 'close', listener: (code: number | null) => void): void;
+  on(event: 'error', listener: (error: NodeJS.ErrnoException) => void): void;
+  kill(): void;
+};
+
+type CloneProjectDependencies = {
+  validatePath: (requestedPath: string) => Promise<WorkspacePathValidationResult>;
+  ensureDirectory: (directoryPath: string) => Promise<void>;
+  pathExists: (targetPath: string) => Promise<boolean>;
+  removePath: (targetPath: string) => Promise<void>;
+  getGithubTokenById: (
+    tokenId: number,
+    userId: number,
+  ) => Promise<{ github_token: string } | null>;
+  spawnGitClone: (cloneUrl: string, clonePath: string) => GitCloneProcess;
+  registerProject: (projectPath: string, customName: string) => Promise<{ project: Record<string, unknown> }>;
+  logError: (message: string, error: unknown) => void;
+};
+
+export type CloneProjectOperation = {
+  waitForCompletion: Promise<void>;
+  cancel: () => void;
+};
+
+async function defaultPathExists(targetPath: string): Promise<boolean> {
+  try {
+    await access(targetPath);
+    return true;
+  } catch (error) {
+    if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
+      return false;
+    }
+
+    throw error;
+  }
+}
+
+function sanitizeGitError(message: string, token: string | null): string {
+  if (!message || !token) {
+    return message;
+  }
+
+  const escapedToken = token.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
+  return message.replace(new RegExp(escapedToken, 'g'), '***');
+}
+
+function resolveCloneFailureMessage(lastError: string, sanitizedError: string): string {
+  if (lastError.includes('Authentication failed') || lastError.includes('could not read Username')) {
+    return 'Authentication failed. Please check your credentials.';
+  }
+
+  if (lastError.includes('Repository not found')) {
+    return 'Repository not found. Please check the URL and ensure you have access.';
+  }
+
+  if (lastError.includes('already exists')) {
+    return 'Directory already exists';
+  }
+
+  if (sanitizedError) {
+    return sanitizedError;
+  }
+
+  return 'Git clone failed';
+}
+
+function resolveErrorMessage(error: unknown): string {
+  if (error instanceof AppError) {
+    return error.message;
+  }
+
+  if (error instanceof Error && error.message) {
+    return error.message;
+  }
+
+  return 'Unexpected error';
+}
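+
+// Default production wiring; callers (tests, for instance) can substitute any of these
+// through the optional third parameter of startCloneProject.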
+const defaultDependencies: CloneProjectDependencies = {
+  validatePath: validateWorkspacePath,
+  ensureDirectory: async (directoryPath: string): Promise<void> => {
+    await mkdir(directoryPath, { recursive: true });
+  },
+  pathExists: defaultPathExists,
+  removePath: async (targetPath: string): Promise<void> => {
+    await rm(targetPath, { recursive: true, force: true });
+  },
+  getGithubTokenById: async (
+    tokenId: number,
+    userId: number,
+  ): Promise<{ github_token: string } | null> => {
+    const tokenRow = githubTokensDb.getGithubTokenById(userId, tokenId) as
+      | { github_token: string }
+      | null;
+    return tokenRow;
+  },
+  spawnGitClone: (cloneUrl: string, clonePath: string): GitCloneProcess =>
+    spawn('git', ['clone', '--progress', '--', cloneUrl, clonePath], {
+      stdio: ['ignore', 'pipe', 'pipe'],
+      env: {
+        ...process.env,
+        GIT_TERMINAL_PROMPT: '0',
+      },
+    }) as unknown as GitCloneProcess,
+  registerProject: async (
+    projectPath: string,
+    customName: string,
+  ): Promise<{ project: Record<string, unknown> }> =>
+    createProject({
+      projectPath,
+      customName,
+    }) as Promise<{ project: Record<string, unknown> }>,
+  logError: (message: string, error: unknown): void => {
+    console.error(message, error);
+  },
+};
+
+export async function startCloneProject(
+  input: CloneProjectInput,
+  handlers: CloneProjectEventHandlers,
+  dependencies: CloneProjectDependencies = defaultDependencies,
+): Promise<CloneProjectOperation> {
+  const normalizedWorkspacePath = input.workspacePath.trim();
+  const normalizedGithubUrl = input.githubUrl.trim();
+
+  if (!normalizedWorkspacePath) {
+    throw new AppError('workspacePath and githubUrl are required', {
+      code: 'WORKSPACE_PATH_REQUIRED',
+      statusCode: 400,
+    });
+  }
+
+  if (!normalizedGithubUrl) {
+    throw new AppError('workspacePath and githubUrl are required', {
+      code: 'GITHUB_URL_REQUIRED',
+      statusCode: 400,
+    });
+  }
+
+  if (normalizedGithubUrl.startsWith('-')) {
+    throw new AppError('Invalid githubUrl', {
+      code: 'INVALID_GITHUB_URL',
+      statusCode: 400,
+    });
+  }
+
+  const pathValidation = await dependencies.validatePath(normalizedWorkspacePath);
+  if (!pathValidation.valid || !pathValidation.resolvedPath) {
+    throw new AppError(pathValidation.error || 'Invalid workspace path', {
+      code: 'INVALID_PROJECT_PATH',
+      statusCode: 400,
+    });
+  }
+
+  const absolutePath = pathValidation.resolvedPath;
+  await dependencies.ensureDirectory(absolutePath);
+
+  let githubToken: string | null = null;
+  if (typeof input.githubTokenId === 'number') {
+    const numericUserId =
+      typeof input.userId === 'number' ? input.userId : Number.parseInt(String(input.userId), 10);
+    if (Number.isNaN(numericUserId)) {
+      throw new AppError('Authenticated user is required', {
+        code: 'AUTHENTICATION_REQUIRED',
+        statusCode: 401,
+      });
+    }
+
+    const token = await dependencies.getGithubTokenById(input.githubTokenId, numericUserId);
+    if (!token) {
+      throw new AppError('GitHub token not found', {
+        code: 'GITHUB_TOKEN_NOT_FOUND',
+        statusCode: 404,
+      });
+    }
+
+    githubToken = token.github_token;
+  } else if (input.newGithubToken && input.newGithubToken.trim().length > 0) {
+    githubToken = input.newGithubToken.trim();
+  }
+
+  const sanitizedGithubUrl = normalizedGithubUrl.replace(/\/+$/, '').replace(/\.git$/, '');
+  const repoName = sanitizedGithubUrl.split('/').pop() || 'repository';
+  const clonePath = path.join(absolutePath, repoName);
+
+  if (await dependencies.pathExists(clonePath)) {
+    throw new AppError(
+      `Directory "${repoName}" already exists. Please choose a different location or remove the existing directory.`,
+      {
+        code: 'CLONE_TARGET_ALREADY_EXISTS',
+        statusCode: 409,
+      },
+    );
+  }
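+
+  // For HTTPS remotes the token is embedded as the URL's username; SSH remotes cannot
+  // be parsed by the URL constructor and are used unchanged (see the catch below).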
+  let cloneUrl = normalizedGithubUrl;
+  if (githubToken) {
+    try {
+      const url = new URL(normalizedGithubUrl);
+      url.username = githubToken;
+      url.password = '';
+      cloneUrl = url.toString();
+    } catch {
+      // SSH URLs cannot be represented by URL constructor and are used as-is.
+    }
+  }
+
+  handlers.onProgress(`Cloning into '${repoName}'...`);
+  const gitProcess = dependencies.spawnGitClone(cloneUrl, clonePath);
+  let lastError = '';
+
+  gitProcess.stdout?.on('data', (data: Buffer | string) => {
+    const message = data.toString().trim();
+    if (message) {
+      handlers.onProgress(message);
+    }
+  });
+
+  gitProcess.stderr?.on('data', (data: Buffer | string) => {
+    const message = data.toString().trim();
+    lastError = message;
+    if (message) {
+      handlers.onProgress(message);
+    }
+  });
+
+  const waitForCompletion = new Promise<void>((resolve, reject) => {
+    gitProcess.on('close', async (code) => {
+      if (code === 0) {
+        try {
+          const createdProject = await dependencies.registerProject(clonePath, repoName);
+          handlers.onComplete({
+            project: createdProject.project,
+            message: 'Repository cloned successfully',
+          });
+          resolve();
+        } catch (error) {
+          reject(
+            new AppError(`Clone succeeded but failed to add project: ${resolveErrorMessage(error)}`, {
+              code: 'CLONE_PROJECT_REGISTRATION_FAILED',
+              statusCode: 500,
+            }),
+          );
+        }
+        return;
+      }
+
+      const sanitizedError = sanitizeGitError(lastError, githubToken);
+      const errorMessage = resolveCloneFailureMessage(lastError, sanitizedError);
+
+      try {
+        await dependencies.removePath(clonePath);
+      } catch (cleanupError) {
+        dependencies.logError('Failed to clean up after clone failure:', cleanupError);
+      }
+
+      reject(
+        new AppError(errorMessage, {
+          code: 'GIT_CLONE_FAILED',
+          statusCode: 500,
+        }),
+      );
+    });
+
+    gitProcess.on('error', (error) => {
+      if (error.code === 'ENOENT') {
+        reject(
+          new AppError('Git is not installed or not in PATH', {
+            code: 'GIT_NOT_FOUND',
+            statusCode: 500,
+          }),
+        );
+        return;
+      }
+
+      reject(
+        new AppError(error.message, {
+          code: 'GIT_EXECUTION_FAILED',
+          statusCode: 500,
+        }),
+      );
+    });
+  });
+
+  return {
+    waitForCompletion,
+    cancel: () => {
+      gitProcess.kill();
+    },
+  };
+}
diff --git a/server/modules/projects/services/project-delete.service.ts b/server/modules/projects/services/project-delete.service.ts
new file mode 100644
index 00000000..a743b4b6
--- /dev/null
+++ b/server/modules/projects/services/project-delete.service.ts
@@ -0,0 +1,75 @@
+import { promises as fs } from 'node:fs';
+import path from 'node:path';
+
+import { projectsDb, sessionsDb } from '@/modules/database/index.js';
+import { AppError } from '@/shared/utils.js';
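+
+// Multiple session rows can reference the same transcript file, so jsonl paths are
+// deduplicated by absolute path before any unlink is attempted.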
+
+function uniqueJsonlPathsFromSessions(
+  sessions: Array<{ jsonl_path: string | null }>,
+): string[] {
+  const seen = new Set<string>();
+  const result: string[] = [];
+
+  for (const row of sessions) {
+    const raw = row.jsonl_path?.trim();
+    if (!raw) {
+      continue;
+    }
+    const absolute = path.isAbsolute(raw) ? path.normalize(raw) : path.resolve(raw);
+    if (seen.has(absolute)) {
+      continue;
+    }
+    seen.add(absolute);
+    result.push(absolute);
+  }
+
+  return result;
+}
+
+async function unlinkJsonlIfExists(filePath: string): Promise<void> {
+  try {
+    await fs.unlink(filePath);
+  } catch (error) {
+    const code = (error as NodeJS.ErrnoException).code;
+    if (code === 'ENOENT') {
+      return;
+    }
+    console.warn(`[project-delete] Failed to remove ${filePath}:`, (error as Error).message);
+  }
+}
+
+/**
+ * Loads all session rows for the project path and removes each distinct `jsonl_path` file on disk.
+ */
+export async function deleteSessionJsonlFilesForProjectPath(projectPath: string): Promise<void> {
+  const sessions = sessionsDb.getSessionsByProjectPath(projectPath);
+  const paths = uniqueJsonlPathsFromSessions(sessions);
+
+  for (const filePath of paths) {
+    await unlinkJsonlIfExists(filePath);
+  }
+}
+
+/**
+ * - **Soft delete** (`force` false): set `isArchived` on the `projects` row (hide from the active list; DB only).
+ * - **Force** (`force` true): for each session row for that `project_path`, delete the file at `jsonl_path`
+ *   (when set), then remove session rows and the `projects` row.
+ */
+export async function deleteOrArchiveProject(projectId: string, force: boolean): Promise<void> {
+  const row = projectsDb.getProjectById(projectId);
+  if (!row) {
+    throw new AppError(`Unknown projectId: ${projectId}`, {
+      code: 'PROJECT_NOT_FOUND',
+      statusCode: 404,
+    });
+  }
+
+  if (!force) {
+    projectsDb.updateProjectIsArchivedById(projectId, true);
+    return;
+  }
+
+  await deleteSessionJsonlFilesForProjectPath(row.project_path);
+  sessionsDb.deleteSessionsByProjectPath(row.project_path);
+  projectsDb.deleteProjectById(projectId);
+}
diff --git a/server/modules/projects/services/project-management.service.ts b/server/modules/projects/services/project-management.service.ts
new file mode 100644
index 00000000..9dbe857e
--- /dev/null
+++ b/server/modules/projects/services/project-management.service.ts
@@ -0,0 +1,150 @@
+import fs from 'node:fs/promises';
+import path from 'node:path';
+
+import { projectsDb } from '@/modules/database/index.js';
+import type {
+  CreateProjectPathResult,
+  ProjectRepositoryRow,
+  WorkspacePathValidationResult,
+} from '@/shared/types.js';
+import { AppError, normalizeProjectPath, validateWorkspacePath } from '@/shared/utils.js';
+
+type CreateProjectInput = {
+  projectPath: string;
+  customName?: string | null;
+};
+
+type CreateProjectDependencies = {
+  validatePath: (projectPath: string) => Promise<WorkspacePathValidationResult>;
+  ensureWorkspaceDirectory: (projectPath: string) => Promise<void>;
+  persistProjectPath: (projectPath: string, customName: string | null) => CreateProjectPathResult;
+  getProjectByPath: (projectPath: string) => ProjectRepositoryRow | null;
+};
+
+type ProjectApiView = {
+  projectId: string;
+  path: string;
+  fullPath: string;
+  displayName: string;
+  customName: string | null;
+  isArchived: boolean;
+  isStarred: boolean;
+  sessions: [];
+  cursorSessions: [];
+  codexSessions: [];
+  geminiSessions: [];
+  sessionMeta: {
+    hasMore: false;
+    total: 0;
+  };
+};
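+
+// A freshly created project always ships empty session buckets (note the literal
+// [] / false / 0 types above); the client hydrates sessions via the sessions endpoints.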
+
+type CreateProjectServiceResult = {
+  outcome: 'created' | 'reactivated_archived';
+  project: ProjectApiView;
+};
+
+const defaultDependencies: CreateProjectDependencies = {
+  validatePath: validateWorkspacePath,
+  ensureWorkspaceDirectory: async (projectPath: string): Promise<void> => {
+    await fs.mkdir(projectPath, { recursive: true });
+    const directoryStats = await fs.stat(projectPath);
+    if (!directoryStats.isDirectory()) {
+      throw new AppError('Path exists but is not a directory', {
+        code: 'PROJECT_PATH_NOT_DIRECTORY',
+        statusCode: 400,
+      });
+    }
+  },
+  persistProjectPath: (projectPath: string, customName: string | null): CreateProjectPathResult =>
+    projectsDb.createProjectPath(projectPath, customName),
+  getProjectByPath: (projectPath: string): ProjectRepositoryRow | null =>
+    projectsDb.getProjectPath(projectPath),
+};
+
+function resolveDisplayName(customName: string | null | undefined, projectPath: string): string {
+  const trimmedCustomName = typeof customName === 'string' ? customName.trim() : '';
+  if (trimmedCustomName.length > 0) {
+    return trimmedCustomName;
+  }
+
+  return path.basename(projectPath) || projectPath;
+}
+
+function mapProjectRowToApiView(projectRow: ProjectRepositoryRow): ProjectApiView {
+  return {
+    projectId: projectRow.project_id,
+    path: projectRow.project_path,
+    fullPath: projectRow.project_path,
+    displayName: resolveDisplayName(projectRow.custom_project_name, projectRow.project_path),
+    customName: projectRow.custom_project_name,
+    isArchived: Boolean(projectRow.isArchived),
+    isStarred: Boolean(projectRow.isStarred),
+    sessions: [],
+    cursorSessions: [],
+    codexSessions: [],
+    geminiSessions: [],
+    sessionMeta: {
+      hasMore: false,
+      total: 0,
+    },
+  };
+}
+
+export async function createProject(
+  input: CreateProjectInput,
+  dependencies: CreateProjectDependencies = defaultDependencies,
+): Promise<CreateProjectServiceResult> {
+  const normalizedPath = normalizeProjectPath(input.projectPath || '');
+  if (!normalizedPath) {
+    throw new AppError('path is required', {
+      code: 'PROJECT_PATH_REQUIRED',
+      statusCode: 400,
+    });
+  }
+
+  const pathValidation = await dependencies.validatePath(normalizedPath);
+  if (!pathValidation.valid || !pathValidation.resolvedPath) {
+    throw new AppError('Invalid project path', {
+      code: 'INVALID_PROJECT_PATH',
+      statusCode: 400,
+      details: pathValidation.error ?? 'Path validation failed',
+    });
+  }
+
+  const resolvedProjectPath = normalizeProjectPath(pathValidation.resolvedPath);
+  await dependencies.ensureWorkspaceDirectory(resolvedProjectPath);
+
+  const normalizedCustomName = resolveDisplayName(input.customName ?? null, resolvedProjectPath);
+  const persistedProject = dependencies.persistProjectPath(resolvedProjectPath, normalizedCustomName);
+
+  if (persistedProject.outcome === 'active_conflict') {
+    throw new AppError('Project path already exists and is active', {
+      code: 'PROJECT_ALREADY_EXISTS',
+      statusCode: 409,
+      details: `Project path already exists: ${resolvedProjectPath}`,
+    });
+  }
+
+  const projectRow = persistedProject.project ?? dependencies.getProjectByPath(resolvedProjectPath);
+  if (!projectRow) {
+    throw new AppError('Failed to resolve project after creation', {
+      code: 'PROJECT_CREATE_FAILED',
+      statusCode: 500,
+    });
+  }
+
+  // Archived rows are intentionally reactivated when their path is reused
+  // (surfaced to callers as the 'reactivated_archived' outcome).
+  return {
+    outcome: persistedProject.outcome,
+    project: mapProjectRowToApiView(projectRow),
+  };
+}
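+
+// Sketch of the route-level contract (paths illustrative):
+//   const { outcome, project } = await createProject({ projectPath: '/workspace/demo', customName: null });
+//   // outcome is 'created' or 'reactivated_archived'; an active duplicate throws
+//   // PROJECT_ALREADY_EXISTS, surfaced to clients as HTTP 409.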
+
+/**
+ * Sets `projects.custom_project_name` for the given `projectId` (or clears it when empty).
+ */
+export function updateProjectDisplayName(projectId: string, newDisplayName: unknown): void {
+  const trimmed = typeof newDisplayName === 'string' ? newDisplayName.trim() : '';
+  projectsDb.updateCustomProjectNameById(projectId, trimmed.length > 0 ? trimmed : null);
+}
diff --git a/server/modules/projects/services/project-star.service.ts b/server/modules/projects/services/project-star.service.ts
new file mode 100644
index 00000000..2c7f11ac
--- /dev/null
+++ b/server/modules/projects/services/project-star.service.ts
@@ -0,0 +1,78 @@
+import { projectsDb } from '@/modules/database/index.js';
+import { AppError } from '@/shared/utils.js';
+
+type ToggleProjectStarResult = {
+  isStarred: boolean;
+};
+
+type ApplyLegacyStarredProjectIdsResult = {
+  updated: number;
+};
+
+function normalizeProjectId(projectId: string): string {
+  return projectId.trim();
+}
+
+function uniqueProjectIds(projectIds: string[]): string[] {
+  const uniqueIds = new Set<string>();
+  for (const projectId of projectIds) {
+    const normalizedProjectId = normalizeProjectId(projectId);
+    if (!normalizedProjectId) {
+      continue;
+    }
+    uniqueIds.add(normalizedProjectId);
+  }
+  return [...uniqueIds];
+}
+
+/**
+ * Applies legacy `localStorage` stars keyed by DB `projectId` onto `projects.isStarred`.
+ *
+ * The operation is idempotent: already-starred projects are ignored, unknown ids are skipped.
+ */
+export function applyLegacyStarredProjectIds(projectIds: string[]): ApplyLegacyStarredProjectIdsResult {
+  const normalizedProjectIds = uniqueProjectIds(projectIds);
+  let updated = 0;
+
+  for (const projectId of normalizedProjectIds) {
+    const project = projectsDb.getProjectById(projectId);
+    if (!project) {
+      continue;
+    }
+
+    if (Boolean(project.isStarred)) {
+      continue;
+    }
+
+    projectsDb.updateProjectIsStarredById(projectId, true);
+    updated += 1;
+  }
+
+  return { updated };
+}
+
+/**
+ * Flips `projects.isStarred` for one project and returns the new state.
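+ * Throws a 400 AppError for a blank id and a 404 when no project row matches.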
+ */ +export function toggleProjectStar(projectId: string): ToggleProjectStarResult { + const normalizedProjectId = normalizeProjectId(projectId); + if (!normalizedProjectId) { + throw new AppError('projectId is required', { + code: 'PROJECT_ID_REQUIRED', + statusCode: 400, + }); + } + + const project = projectsDb.getProjectById(normalizedProjectId); + if (!project) { + throw new AppError('Project not found', { + code: 'PROJECT_NOT_FOUND', + statusCode: 404, + }); + } + + const nextStarredState = !Boolean(project.isStarred); + projectsDb.updateProjectIsStarredById(normalizedProjectId, nextStarredState); + + return { isStarred: nextStarredState }; +} diff --git a/server/modules/projects/services/projects-has-taskmaster.service.ts b/server/modules/projects/services/projects-has-taskmaster.service.ts new file mode 100644 index 00000000..ee1d6c6c --- /dev/null +++ b/server/modules/projects/services/projects-has-taskmaster.service.ts @@ -0,0 +1,248 @@ +import { access, readFile, stat } from 'node:fs/promises'; +import path from 'node:path'; + +import { projectsDb } from '@/modules/database/index.js'; +import { AppError } from '@/shared/utils.js'; + +type TaskMasterTask = { + status?: string; + subtasks?: Array<{ + status?: string; + }>; +}; + +type TaskMasterMetadata = + | { + taskCount: number; + subtaskCount: number; + completed: number; + pending: number; + inProgress: number; + review: number; + completionPercentage: number; + lastModified: string; + } + | { + error: string; + } + | null; + +type TaskMasterDetectionResult = { + hasTaskmaster: boolean; + hasEssentialFiles?: boolean; + files?: Record; + metadata?: TaskMasterMetadata; + path?: string; + reason?: string; +}; + +type NormalizedTaskMasterInfo = { + hasTaskmaster: boolean; + hasEssentialFiles: boolean; + metadata: TaskMasterMetadata; + status: 'configured' | 'not-configured'; +}; + +type GetProjectTaskMasterByIdResult = { + projectId: string; + projectPath: string; + taskmaster: NormalizedTaskMasterInfo; +}; + +type GetProjectTaskMasterDependencies = { + resolveProjectPathById: (projectId: string) => string | null; + detectTaskMasterFolder: (projectPath: string) => Promise; +}; + +type GetProjectTaskMasterResolver = (projectId: string) => Promise; + +function extractTasksFromJson(tasksData: unknown): TaskMasterTask[] { + if (!tasksData || typeof tasksData !== 'object') { + return []; + } + + const legacyTasks = (tasksData as { tasks?: unknown }).tasks; + if (Array.isArray(legacyTasks)) { + return legacyTasks as TaskMasterTask[]; + } + + const taggedTaskCollections: TaskMasterTask[] = []; + for (const tagValue of Object.values(tasksData)) { + if (!tagValue || typeof tagValue !== 'object') { + continue; + } + + const tagTasks = (tagValue as { tasks?: unknown }).tasks; + if (Array.isArray(tagTasks)) { + taggedTaskCollections.push(...(tagTasks as TaskMasterTask[])); + } + } + + return taggedTaskCollections; +} + +async function detectTaskMasterFolder(projectPath: string): Promise { + try { + const taskMasterPath = path.join(projectPath, '.taskmaster'); + + try { + const taskMasterStats = await stat(taskMasterPath); + if (!taskMasterStats.isDirectory()) { + return { + hasTaskmaster: false, + reason: '.taskmaster exists but is not a directory', + }; + } + } catch (error) { + const fileError = error as NodeJS.ErrnoException; + if (fileError.code === 'ENOENT') { + return { + hasTaskmaster: false, + reason: '.taskmaster directory not found', + }; + } + + throw fileError; + } + + const keyFiles = ['tasks/tasks.json', 'config.json']; + const 
+    const fileStatus: Record<string, boolean> = {};
+    let hasEssentialFiles = true;
+
+    for (const fileName of keyFiles) {
+      const absoluteFilePath = path.join(taskMasterPath, fileName);
+      try {
+        await access(absoluteFilePath);
+        fileStatus[fileName] = true;
+      } catch {
+        fileStatus[fileName] = false;
+        if (fileName === 'tasks/tasks.json') {
+          hasEssentialFiles = false;
+        }
+      }
+    }
+
+    let taskMetadata: TaskMasterMetadata = null;
+    if (fileStatus['tasks/tasks.json']) {
+      const tasksPath = path.join(taskMasterPath, 'tasks/tasks.json');
+      try {
+        const tasksContent = await readFile(tasksPath, 'utf8');
+        const parsedTasksJson = JSON.parse(tasksContent) as unknown;
+        const tasks = extractTasksFromJson(parsedTasksJson);
+
+        const stats = tasks.reduce(
+          (accumulator, currentTask) => {
+            accumulator.total += 1;
+            const normalizedTaskStatus = currentTask.status || 'pending';
+            accumulator.byStatus[normalizedTaskStatus] = (accumulator.byStatus[normalizedTaskStatus] || 0) + 1;
+
+            if (Array.isArray(currentTask.subtasks)) {
+              for (const subtask of currentTask.subtasks) {
+                accumulator.subtotalTasks += 1;
+                const normalizedSubtaskStatus = subtask.status || 'pending';
+                accumulator.subtaskByStatus[normalizedSubtaskStatus] =
+                  (accumulator.subtaskByStatus[normalizedSubtaskStatus] || 0) + 1;
+              }
+            }
+
+            return accumulator;
+          },
+          {
+            total: 0,
+            subtotalTasks: 0,
+            byStatus: {} as Record<string, number>,
+            subtaskByStatus: {} as Record<string, number>,
+          },
+        );
+
+        const tasksStat = await stat(tasksPath);
+        taskMetadata = {
+          taskCount: stats.total,
+          subtaskCount: stats.subtotalTasks,
+          completed: stats.byStatus.done || 0,
+          pending: stats.byStatus.pending || 0,
+          inProgress: stats.byStatus['in-progress'] || 0,
+          review: stats.byStatus.review || 0,
+          completionPercentage: stats.total > 0 ? Math.round(((stats.byStatus.done || 0) / stats.total) * 100) : 0,
+          lastModified: tasksStat.mtime.toISOString(),
+        };
+      } catch (parseError) {
+        console.warn('Failed to parse tasks.json:', (parseError as Error).message);
+        taskMetadata = {
+          error: 'Failed to parse tasks.json',
+        };
+      }
+    }
+
+    return {
+      hasTaskmaster: true,
+      hasEssentialFiles,
+      files: fileStatus,
+      metadata: taskMetadata,
+      path: taskMasterPath,
+    };
+  } catch (error) {
+    console.error('Error detecting TaskMaster folder:', error);
+    return {
+      hasTaskmaster: false,
+      reason: `Error checking directory: ${(error as Error).message}`,
+    };
+  }
+}
+
+function normalizeTaskMasterInfo(taskMasterResult: TaskMasterDetectionResult | null = null): NormalizedTaskMasterInfo {
+  const hasTaskmaster = Boolean(taskMasterResult?.hasTaskmaster);
+  const hasEssentialFiles = Boolean(taskMasterResult?.hasEssentialFiles);
+
+  return {
+    hasTaskmaster,
+    hasEssentialFiles,
+    metadata: taskMasterResult?.metadata ?? null,
+    status: hasTaskmaster && hasEssentialFiles ? 'configured' : 'not-configured',
+  };
+}
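+
+// 'configured' requires both the .taskmaster directory and an existing tasks/tasks.json.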
+function normalizeTaskMasterInfo(taskMasterResult: TaskMasterDetectionResult | null = null): NormalizedTaskMasterInfo {
+  const hasTaskmaster = Boolean(taskMasterResult?.hasTaskmaster);
+  const hasEssentialFiles = Boolean(taskMasterResult?.hasEssentialFiles);
+
+  return {
+    hasTaskmaster,
+    hasEssentialFiles,
+    metadata: taskMasterResult?.metadata ?? null,
+    status: hasTaskmaster && hasEssentialFiles ? 'configured' : 'not-configured',
+  };
+}
+
+const defaultDependencies: GetProjectTaskMasterDependencies = {
+  resolveProjectPathById: (projectId: string): string | null => projectsDb.getProjectPathById(projectId),
+  detectTaskMasterFolder,
+};
+
+export async function getProjectTaskMasterById(
+  projectId: string,
+  dependencies: GetProjectTaskMasterDependencies = defaultDependencies,
+): Promise<GetProjectTaskMasterByIdResult | null> {
+  const projectPath = dependencies.resolveProjectPathById(projectId);
+  if (!projectPath) {
+    return null;
+  }
+
+  const taskMasterResult = await dependencies.detectTaskMasterFolder(projectPath);
+  return {
+    projectId,
+    projectPath,
+    taskmaster: normalizeTaskMasterInfo(taskMasterResult),
+  };
+}
+
+export async function getProjectTaskMaster(
+  projectId: string,
+  resolveById: GetProjectTaskMasterResolver = getProjectTaskMasterById,
+): Promise<GetProjectTaskMasterByIdResult> {
+  const normalizedProjectId = projectId.trim();
+  if (!normalizedProjectId) {
+    throw new AppError('projectId is required', {
+      code: 'PROJECT_ID_REQUIRED',
+      statusCode: 400,
+    });
+  }
+
+  const taskMasterDetails = await resolveById(normalizedProjectId);
+  if (!taskMasterDetails) {
+    throw new AppError('Project not found', {
+      code: 'PROJECT_NOT_FOUND',
+      statusCode: 404,
+    });
+  }
+
+  return taskMasterDetails;
+}
diff --git a/server/modules/projects/services/projects-with-sessions-fetch.service.ts b/server/modules/projects/services/projects-with-sessions-fetch.service.ts
new file mode 100644
index 00000000..4d473a21
--- /dev/null
+++ b/server/modules/projects/services/projects-with-sessions-fetch.service.ts
@@ -0,0 +1,285 @@
+import fs from 'node:fs/promises';
+import path from 'node:path';
+
+import { projectsDb, sessionsDb } from '@/modules/database/index.js';
+import { sessionSynchronizerService } from '@/modules/providers/index.js';
+import { WS_OPEN_STATE, connectedClients } from '@/modules/websocket/index.js';
+import type { RealtimeClientConnection } from '@/shared/types.js';
+import { AppError } from '@/shared/utils.js';
+
+type SessionSummary = {
+  id: string;
+  summary: string;
+  messageCount: number;
+  lastActivity: string;
+};
+
+type SessionsByProvider = Record<'claude' | 'cursor' | 'codex' | 'gemini', SessionSummary[]>;
+
+type SessionRepositoryRow = {
+  provider: string;
+  session_id: string;
+  custom_name?: string | null;
+  updated_at?: string | null;
+  created_at?: string | null;
+};
+
+export type ProjectListItem = {
+  projectId: string;
+  path: string;
+  displayName: string;
+  fullPath: string;
+  isStarred: boolean;
+  sessions: SessionSummary[];
+  cursorSessions: SessionSummary[];
+  codexSessions: SessionSummary[];
+  geminiSessions: SessionSummary[];
+  sessionMeta: {
+    hasMore: boolean;
+    total: number;
+  };
+};
+
+type ProgressUpdate = {
+  phase: 'loading' | 'complete';
+  current: number;
+  total: number;
+  currentProject?: string;
+};
+
+type GetProjectsWithSessionsOptions = {
+  skipSynchronization?: boolean;
+  sessionsLimit?: number;
+  sessionsOffset?: number;
+};
+
+type SessionPaginationOptions = {
+  limit?: number;
+  offset?: number;
+};
+
+type ProjectSessionsPageResult = {
+  sessionsByProvider: SessionsByProvider;
+  total: number;
+  hasMore: boolean;
+};
+
+export type ProjectSessionsPageApiView = {
+  projectId: string;
+  sessions: SessionSummary[];
+  cursorSessions: SessionSummary[];
+  codexSessions: SessionSummary[];
+  geminiSessions: SessionSummary[];
+  sessionMeta: {
+    hasMore: boolean;
+    total: number;
+  };
+};
+
+const DEFAULT_PROJECT_SESSIONS_PAGE_SIZE = 20;
+const MAX_PROJECT_SESSIONS_PAGE_SIZE = 200;
+
+/**
+ * Generates a display name for a project path, preferring the package.json
+ * name when one is available.
+ */
+export async function generateDisplayName(projectName: string, actualProjectDir: string | null = null): Promise<string> {
+  // Use actual project directory if provided, otherwise decode from project name.
+  const projectPath = actualProjectDir || projectName.replace(/-/g, '/');
+
+  // Try to read package.json from the project path.
+  try {
+    const packageJsonPath = path.join(projectPath, 'package.json');
+    const packageData = await fs.readFile(packageJsonPath, 'utf8');
+    const packageJson = JSON.parse(packageData) as { name?: string };
+
+    // Return the name from package.json if it exists.
+    if (packageJson.name) {
+      return packageJson.name;
+    }
+  } catch {
+    // Fall back to path-based naming if package.json doesn't exist or can't be read.
+  }
+
+  // If it starts with /, it's an absolute path.
+  if (projectPath.startsWith('/')) {
+    const parts = projectPath.split('/').filter(Boolean);
+    // Return only the last folder name.
+    return parts[parts.length - 1] || projectPath;
+  }
+
+  return projectPath;
+}
+
+function normalizeSessionPagination(options: SessionPaginationOptions = {}): { limit: number; offset: number } {
+  const rawLimit = Number.isFinite(options.limit) ? Math.floor(Number(options.limit)) : DEFAULT_PROJECT_SESSIONS_PAGE_SIZE;
+  const rawOffset = Number.isFinite(options.offset) ? Math.floor(Number(options.offset)) : 0;
+
+  return {
+    limit: Math.min(Math.max(1, rawLimit), MAX_PROJECT_SESSIONS_PAGE_SIZE),
+    offset: Math.max(0, rawOffset),
+  };
+}
+
+function mapSessionRowToSummary(row: SessionRepositoryRow): SessionSummary {
+  return {
+    id: row.session_id,
+    summary: row.custom_name || '',
+    messageCount: 0,
+    lastActivity: row.updated_at ?? row.created_at ?? new Date().toISOString(),
+  };
+}
+
+function bucketSessionRowsByProvider(rows: SessionRepositoryRow[]): SessionsByProvider {
+  const byProvider: SessionsByProvider = {
+    claude: [],
+    cursor: [],
+    codex: [],
+    gemini: [],
+  };
+
+  for (const row of rows) {
+    const provider = row.provider as keyof SessionsByProvider;
+    const bucket = byProvider[provider];
+    if (!bucket) {
+      continue;
+    }
+
+    bucket.push(mapSessionRowToSummary(row));
+  }
+
+  return byProvider;
+}
+
+/**
+ * Reads one paginated project session slice from the DB and groups rows by provider.
+ */
+function readProjectSessionsPageByPath(
+  projectPath: string,
+  options: SessionPaginationOptions = {},
+): ProjectSessionsPageResult {
+  const pagination = normalizeSessionPagination(options);
+  const rows = sessionsDb.getSessionsByProjectPathPage(
+    projectPath,
+    pagination.limit,
+    pagination.offset,
+  ) as SessionRepositoryRow[];
+  const total = sessionsDb.countSessionsByProjectPath(projectPath);
+
+  return {
+    sessionsByProvider: bucketSessionRowsByProvider(rows),
+    total,
+    hasMore: pagination.offset + rows.length < total,
+  };
+}
+
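+// Clamping sketch for the pagination helpers above (values follow directly
+// from normalizeSessionPagination):
+//   normalizeSessionPagination({})                       -> { limit: 20,  offset: 0 }
+//   normalizeSessionPagination({ limit: 0, offset: -5 }) -> { limit: 1,   offset: 0 }
+//   normalizeSessionPagination({ limit: 10_000 })        -> { limit: 200, offset: 0 }
+// hasMore then reduces to offset + rows.length < total for the returned slice.
+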
+// Broadcast progress to all connected WebSocket clients.
+function broadcastProgress(progress: ProgressUpdate) {
+  const message = JSON.stringify({
+    type: 'loading_progress',
+    ...progress,
+  });
+
+  connectedClients.forEach((client: RealtimeClientConnection) => {
+    if (client.readyState === WS_OPEN_STATE) {
+      client.send(message);
+    }
+  });
+}
+
+/**
+ * Reads all projects from DB and returns provider-bucketed session summaries.
+ */
+export async function getProjectsWithSessions(
+  options: GetProjectsWithSessionsOptions = {}
+): Promise<ProjectListItem[]> {
+  if (!options.skipSynchronization) {
+    await sessionSynchronizerService.synchronizeSessions();
+  }
+
+  const projectRows = projectsDb.getProjectPaths() as Array<{
+    project_id: string;
+    project_path: string;
+    custom_project_name?: string | null;
+    isStarred?: number;
+  }>;
+  const totalProjects = projectRows.length;
+  const projects: ProjectListItem[] = [];
+  let processedProjects = 0;
+
+  for (const row of projectRows) {
+    processedProjects += 1;
+
+    const projectId = row.project_id;
+    const projectPath = row.project_path;
+
+    broadcastProgress({
+      phase: 'loading',
+      current: processedProjects,
+      total: totalProjects,
+      currentProject: projectPath,
+    });
+
+    const displayName =
+      row.custom_project_name && row.custom_project_name.trim().length > 0
+        ? row.custom_project_name
+        : await generateDisplayName(path.basename(projectPath) || projectPath, projectPath);
+
+    const sessionsPage = readProjectSessionsPageByPath(projectPath, {
+      limit: options.sessionsLimit,
+      offset: options.sessionsOffset,
+    });
+
+    projects.push({
+      projectId,
+      path: projectPath,
+      displayName,
+      fullPath: projectPath,
+      isStarred: Boolean(row.isStarred),
+      sessions: sessionsPage.sessionsByProvider.claude,
+      cursorSessions: sessionsPage.sessionsByProvider.cursor,
+      codexSessions: sessionsPage.sessionsByProvider.codex,
+      geminiSessions: sessionsPage.sessionsByProvider.gemini,
+      sessionMeta: {
+        hasMore: sessionsPage.hasMore,
+        total: sessionsPage.total,
+      },
+    });
+  }
+
+  broadcastProgress({
+    phase: 'complete',
+    current: totalProjects,
+    total: totalProjects,
+  });
+
+  return projects;
+}
+
+/**
+ * Loads one paginated session slice for a specific project id.
+ */
+export async function getProjectSessionsPage(
+  projectId: string,
+  options: SessionPaginationOptions = {},
+): Promise<ProjectSessionsPageApiView> {
+  const projectRow = projectsDb.getProjectById(projectId);
+  if (!projectRow) {
+    throw new AppError(`Project "${projectId}" was not found.`, {
+      code: 'PROJECT_NOT_FOUND',
+      statusCode: 404,
+    });
+  }
+
+  const sessionsPage = readProjectSessionsPageByPath(projectRow.project_path, options);
+  return {
+    projectId: projectRow.project_id,
+    sessions: sessionsPage.sessionsByProvider.claude,
+    cursorSessions: sessionsPage.sessionsByProvider.cursor,
+    codexSessions: sessionsPage.sessionsByProvider.codex,
+    geminiSessions: sessionsPage.sessionsByProvider.gemini,
+    sessionMeta: {
+      hasMore: sessionsPage.hasMore,
+      total: sessionsPage.total,
+    },
+  };
+}
diff --git a/server/modules/projects/tests/project-clone.service.test.ts b/server/modules/projects/tests/project-clone.service.test.ts
new file mode 100644
index 00000000..85c807d8
--- /dev/null
+++ b/server/modules/projects/tests/project-clone.service.test.ts
@@ -0,0 +1,183 @@
+import assert from 'node:assert/strict';
+import { EventEmitter } from 'node:events';
+import path from 'node:path';
+import { PassThrough } from 'node:stream';
+import test from 'node:test';
+
+import { startCloneProject } from '@/modules/projects/services/project-clone.service.js';
+import { AppError } from '@/shared/utils.js';
+
+type TestDependencies = Parameters<typeof startCloneProject>[2];
+
+function buildDependencies(overrides: Partial<NonNullable<TestDependencies>> = {}): NonNullable<TestDependencies> {
+  return {
+    validatePath: async () => ({ valid: true, resolvedPath: '/workspace/root' }),
+    ensureDirectory: async () => undefined,
+    pathExists: async () => false,
+    removePath: async () => undefined,
+    getGithubTokenById: async () => ({ github_token: 'token-value' }),
+    spawnGitClone: () => {
+      throw new Error('spawnGitClone should be overridden in this test');
+    },
+    registerProject: async () => ({ project: { projectId: 'project-1' } }),
+    logError: () => undefined,
+    ...overrides,
+  };
+}
+
+function createMockGitProcess() {
+  const emitter = new EventEmitter() as EventEmitter & {
+    stdout: PassThrough;
+    stderr: PassThrough;
+    kill: () => void;
+  };
+
+  emitter.stdout = new PassThrough();
+  emitter.stderr = new PassThrough();
+  emitter.kill = () => {
+    emitter.emit('close', null);
+  };
+
+  return emitter;
+}
+
+test('startCloneProject rejects when workspace path is missing', async () => {
+  await assert.rejects(
+    async () =>
+      startCloneProject(
+        {
+          workspacePath: '',
+          githubUrl: 'https://github.com/example/repo',
+          userId: 1,
+        },
+        {
+          onProgress: () => undefined,
+          onComplete: () => undefined,
+        },
+        buildDependencies(),
+      ),
+    (error: unknown) => {
+      assert.ok(error instanceof AppError);
+      assert.equal(error.code, 'WORKSPACE_PATH_REQUIRED');
+      return true;
+    },
+  );
+});
+
+test('startCloneProject rejects when github URL is missing', async () => {
+  await assert.rejects(
+    async () =>
+      startCloneProject(
+        {
+          workspacePath: '/workspace/root',
+          githubUrl: '',
+          userId: 1,
+        },
+        {
+          onProgress: () => undefined,
+          onComplete: () => undefined,
+        },
+        buildDependencies(),
+      ),
+    (error: unknown) => {
+      assert.ok(error instanceof AppError);
+      assert.equal(error.code, 'GITHUB_URL_REQUIRED');
+      return true;
+    },
+  );
+});
+
+test('startCloneProject rejects github URL values that begin with option prefixes', async () => {
+  await assert.rejects(
+    async () =>
+      startCloneProject(
+        {
+          workspacePath: '/workspace/root',
+          githubUrl: '--upload-pack=malicious',
+          userId: 1,
+        },
+        {
+          onProgress: () => undefined,
+          onComplete: () => undefined,
+        },
+        buildDependencies(),
+      ),
+    (error: unknown) => {
+      assert.ok(error instanceof AppError);
+      assert.equal(error.code, 'INVALID_GITHUB_URL');
+      return true;
+    },
+  );
+});
+
+test('startCloneProject rejects when selected github token does not exist', async () => {
+  await assert.rejects(
+    async () =>
+      startCloneProject(
+        {
+          workspacePath: '/workspace/root',
+          githubUrl: 'https://github.com/example/repo',
+          githubTokenId: 12,
+          userId: 1,
+        },
+        {
+          onProgress: () => undefined,
+          onComplete: () => undefined,
+        },
+        buildDependencies({
+          getGithubTokenById: async () => null,
+        }),
+      ),
+    (error: unknown) => {
+      assert.ok(error instanceof AppError);
+      assert.equal(error.code, 'GITHUB_TOKEN_NOT_FOUND');
+      return true;
+    },
+  );
+});
+
+test('startCloneProject completes and emits complete payload when git exits successfully', async () => {
+  const gitProcess = createMockGitProcess();
+  const progressMessages: string[] = [];
+  let completePayload: { project: Record<string, unknown>; message: string } | null = null;
+  let capturedProjectPath = '';
+  let capturedCustomName = '';
+
+  const operation = await startCloneProject(
+    {
+      workspacePath: '/workspace/root',
+      githubUrl: 'https://github.com/example/repo.git',
+      userId: 1,
+    },
+    {
+      onProgress: (message) => {
+        progressMessages.push(message);
+      },
+      onComplete: (payload: { project: Record<string, unknown>; message: string }) => {
+        completePayload = payload;
+      },
+    },
+    buildDependencies({
+      spawnGitClone: () => gitProcess as any,
+      registerProject: async (projectPath, customName) => {
+        capturedProjectPath = projectPath;
+        capturedCustomName = customName;
+        return { project: { projectId: 'project-1', path: projectPath } };
+      },
+    }),
+  );
+
+  gitProcess.emit('close', 0);
+  await operation.waitForCompletion;
+
+  assert.ok(progressMessages.some((message) => message.includes("Cloning into 'repo'")));
+  assert.equal(capturedCustomName, 'repo');
+  assert.equal(path.basename(capturedProjectPath), 'repo');
+  assert.notEqual(completePayload, null);
+  const resolvedCompletePayload = completePayload as unknown as {
+    project: Record<string, unknown>;
+    message: string;
+  };
+  assert.equal(resolvedCompletePayload.message, 'Repository cloned successfully');
+  assert.equal((resolvedCompletePayload.project.projectId as string) || '', 'project-1');
+});
diff --git a/server/modules/projects/tests/project-management.service.test.ts b/server/modules/projects/tests/project-management.service.test.ts
new file mode 100644
index 00000000..3b0e47bb
--- /dev/null
+++ b/server/modules/projects/tests/project-management.service.test.ts
@@ -0,0 +1,117 @@
+import assert from 'node:assert/strict';
+import test from 'node:test';
+
+import { createProject } from '@/modules/projects/services/project-management.service.js';
+import { AppError } from '@/shared/utils.js';
+
+const projectRow = {
+  project_id: 'project-1',
+  project_path: '/workspace/my-project',
+  custom_project_name: 'my-project',
+  isStarred: 0,
+  isArchived: 0,
+};
+
+test('createProject throws when project path is missing', async () => {
+  await assert.rejects(
+    async () => createProject({ projectPath: '' }),
+    (error: unknown) => {
+      assert.ok(error instanceof AppError);
+      assert.equal(error.code, 'PROJECT_PATH_REQUIRED');
+      assert.equal(error.statusCode, 400);
+      return true;
+    },
+  );
+});
+
+test('createProject throws when path validation fails', async () => {
+  await assert.rejects(
+    async () =>
+      createProject(
+        { projectPath: '/invalid/path' },
+        {
+          validatePath: async () => ({ valid: false, error: 'blocked path' }),
+          ensureWorkspaceDirectory: async () => undefined,
+          persistProjectPath: () => ({ outcome: 'created', project: projectRow }),
+          getProjectByPath: () => projectRow,
+        },
+      ),
+    (error: unknown) => {
+      assert.ok(error instanceof AppError);
+      assert.equal(error.code, 'INVALID_PROJECT_PATH');
+      assert.equal(error.statusCode, 400);
+      assert.equal(error.details, 'blocked path');
+      return true;
+    },
+  );
+});
+
+test('createProject throws conflict when active project path already exists', async () => {
+  await assert.rejects(
+    async () =>
+      createProject(
+        { projectPath: '/workspace/my-project' },
+        {
+          validatePath: async () => ({ valid: true, resolvedPath: '/workspace/my-project' }),
+          ensureWorkspaceDirectory: async () => undefined,
+          persistProjectPath: () => ({ outcome: 'active_conflict', project: projectRow }),
+          getProjectByPath: () => projectRow,
+        },
+      ),
+    (error: unknown) => {
+      assert.ok(error instanceof AppError);
+      assert.equal(error.code, 'PROJECT_ALREADY_EXISTS');
+      assert.equal(error.statusCode, 409);
+      assert.equal(error.details, 'Project path already exists: /workspace/my-project');
+      return true;
+    },
+  );
+});
+
+test('createProject falls back to directory name when custom name is not provided', async () => {
+  let capturedCustomName: string | null = null;
+
+  const result = await createProject(
+    { projectPath: '/workspace/my-project', customName: '' },
+    {
+      validatePath: async () => ({ valid: true, resolvedPath: '/workspace/my-project' }),
+      ensureWorkspaceDirectory: async () => undefined,
+      persistProjectPath: (_projectPath, customName) => {
+        capturedCustomName = customName;
+        return {
+          outcome: 'created',
+          project: {
+            ...projectRow,
+            custom_project_name: customName,
+          },
+        };
+      },
+      getProjectByPath: () => projectRow,
+    },
+  );
+
+  assert.equal(capturedCustomName, 'my-project');
+  assert.equal(result.outcome, 'created');
+  assert.equal(result.project.displayName, 'my-project');
+});
+
+test('createProject returns archived reuse outcome when archived row is reused', async () => {
+  const result = await createProject(
+    { projectPath: '/workspace/my-project' },
+    {
+      validatePath: async () => ({ valid: true, resolvedPath: '/workspace/my-project' }),
+      ensureWorkspaceDirectory: async () => undefined,
+      persistProjectPath: () => ({
+        outcome: 'reactivated_archived',
+        project: {
+          ...projectRow,
+          isArchived: 1,
+        },
+      }),
+      getProjectByPath: () => projectRow,
+    },
+  );
+
+  assert.equal(result.outcome, 'reactivated_archived');
+  assert.equal(result.project.isArchived, true);
+});
diff --git a/server/modules/projects/tests/project-star.service.test.ts b/server/modules/projects/tests/project-star.service.test.ts
new file mode 100644
index 00000000..ea594c86
--- /dev/null
+++ b/server/modules/projects/tests/project-star.service.test.ts
@@ -0,0 +1,123 @@
+import assert from 'node:assert/strict';
+import test from 'node:test';
+
+import { projectsDb } from '@/modules/database/index.js';
+import { applyLegacyStarredProjectIds, toggleProjectStar } from '@/modules/projects/services/project-star.service.js';
+import { AppError } from '@/shared/utils.js';
+
+type ProjectRow = {
+  project_id: string;
+  project_path: string;
+  custom_project_name: string | null;
+  isStarred: number;
+  isArchived: number;
+};
+
+test('toggleProjectStar throws when projectId is missing', () => {
+  assert.throws(
+    () => toggleProjectStar(' '),
+    (error: unknown) =>
+      error instanceof AppError
+      && error.code === 'PROJECT_ID_REQUIRED'
+      && error.statusCode === 400,
+  );
+});
+
+test('toggleProjectStar throws when project does not exist', () => {
+  const originalGetProjectById = projectsDb.getProjectById;
+  try {
+    projectsDb.getProjectById = () => null;
+    assert.throws(
+      () => toggleProjectStar('project-1'),
+      (error: unknown) =>
+        error instanceof AppError
+        && error.code === 'PROJECT_NOT_FOUND'
+        && error.statusCode === 404,
+    );
+  } finally {
+    projectsDb.getProjectById = originalGetProjectById;
+  }
+});
+
+test('toggleProjectStar flips star state and persists it', () => {
+  const originalGetProjectById = projectsDb.getProjectById;
+  const originalUpdateProjectIsStarredById = projectsDb.updateProjectIsStarredById;
+
+  let capturedProjectId = '';
+  let capturedState = false;
+
+  try {
+    projectsDb.getProjectById = () =>
+      ({
+        project_id: 'project-1',
+        project_path: '/workspace/project-1',
+        custom_project_name: 'project-1',
+        isStarred: 0,
+        isArchived: 0,
+      }) as ProjectRow;
+    projectsDb.updateProjectIsStarredById = (projectId: string, isStarred: boolean) => {
+      capturedProjectId = projectId;
+      capturedState = isStarred;
+    };
+
+    const result = toggleProjectStar('project-1');
+
+    assert.equal(result.isStarred, true);
+    assert.equal(capturedProjectId, 'project-1');
+    assert.equal(capturedState, true);
+  } finally {
+    projectsDb.getProjectById = originalGetProjectById;
+    projectsDb.updateProjectIsStarredById = originalUpdateProjectIsStarredById;
+  }
+});
+
+test('applyLegacyStarredProjectIds stars only valid, unstarred projects', () => {
+  const originalGetProjectById = projectsDb.getProjectById;
+  const originalUpdateProjectIsStarredById = projectsDb.updateProjectIsStarredById;
+
+  const updatedProjectIds: string[] = [];
+
+  try {
+    projectsDb.getProjectById = (projectId: string) => {
+      if (projectId === 'project-a') {
+        return {
+          project_id: 'project-a',
+          project_path: '/workspace/project-a',
+          custom_project_name: 'A',
+          isStarred: 0,
+          isArchived: 0,
+        } as ProjectRow;
+      }
+
+      if (projectId === 'project-b') {
+        return {
+          project_id: 'project-b',
+          project_path: '/workspace/project-b',
+          custom_project_name: 'B',
+          isStarred: 1,
+          isArchived: 0,
+        } as ProjectRow;
+      }
+
+      return null;
+    };
+    projectsDb.updateProjectIsStarredById = (projectId: string) => {
+      updatedProjectIds.push(projectId);
+    };
+
+    const result = applyLegacyStarredProjectIds([
+      'project-a',
+      'project-b',
+      'missing-project',
+      'project-a',
+      '',
+      ' ',
+    ]);
+
+    assert.equal(result.updated, 1);
+    assert.deepEqual(updatedProjectIds, ['project-a']);
+  } finally {
+    projectsDb.getProjectById = originalGetProjectById;
+    projectsDb.updateProjectIsStarredById = originalUpdateProjectIsStarredById;
+  }
+});
diff --git a/server/modules/projects/tests/projects-has-taskmaster.service.test.ts b/server/modules/projects/tests/projects-has-taskmaster.service.test.ts
new file mode 100644
index 00000000..f7e8863d
--- /dev/null
+++ b/server/modules/projects/tests/projects-has-taskmaster.service.test.ts
@@ -0,0 +1,105 @@
+import assert from 'node:assert/strict';
+import test from 'node:test';
+
+import {
+  getProjectTaskMaster,
+  getProjectTaskMasterById,
+} from '@/modules/projects/services/projects-has-taskmaster.service.js';
+import { AppError } from '@/shared/utils.js';
+
+test('getProjectTaskMasterById returns null when project path is missing', async () => {
+  const result = await getProjectTaskMasterById('project-1', {
+    resolveProjectPathById: () => null,
+    detectTaskMasterFolder: async () => {
+      throw new Error('detectTaskMasterFolder should not be called when path is missing');
+    },
+  });
+
+  assert.equal(result, null);
+});
+
+test('getProjectTaskMasterById returns configured status when taskmaster exists with essential files', async () => {
+  const result = await getProjectTaskMasterById('project-1', {
+    resolveProjectPathById: () => '/workspace/project-1',
+    detectTaskMasterFolder: async () => ({
+      hasTaskmaster: true,
+      hasEssentialFiles: true,
+      metadata: {
+        taskCount: 3,
+        subtaskCount: 0,
+        completed: 1,
+        pending: 2,
+        inProgress: 0,
+        review: 0,
+        completionPercentage: 33,
+        lastModified: '2026-01-01T00:00:00.000Z',
+      },
+    }),
+  });
+
+  assert.ok(result);
+  assert.equal(result.projectId, 'project-1');
+  assert.equal(result.projectPath, '/workspace/project-1');
+  assert.equal(result.taskmaster.hasTaskmaster, true);
+  assert.equal(result.taskmaster.hasEssentialFiles, true);
+  assert.equal(result.taskmaster.status, 'configured');
+  assert.deepEqual(result.taskmaster.metadata, {
+    taskCount: 3,
+    subtaskCount: 0,
+    completed: 1,
+    pending: 2,
+    inProgress: 0,
+    review: 0,
+    completionPercentage: 33,
+    lastModified: '2026-01-01T00:00:00.000Z',
+  });
+});
+
+test('getProjectTaskMasterById returns not-configured status when taskmaster is missing', async () => {
+  const result = await getProjectTaskMasterById('project-1', {
+    resolveProjectPathById: () => '/workspace/project-1',
+    detectTaskMasterFolder: async () => ({
+      hasTaskmaster: false,
+    }),
+  });
+
+  assert.ok(result);
+  assert.equal(result.taskmaster.hasTaskmaster, false);
+  assert.equal(result.taskmaster.hasEssentialFiles, false);
+  assert.equal(result.taskmaster.status, 'not-configured');
+  assert.equal(result.taskmaster.metadata, null);
+});
+
+test('getProjectTaskMaster throws when project id is missing', async () => {
+  await assert.rejects(
+    async () =>
+      getProjectTaskMaster('', async () => ({
+        projectId: 'project-1',
+        projectPath: '/workspace/project-1',
+        taskmaster: {
+          hasTaskmaster: true,
+          hasEssentialFiles: true,
+          metadata: null,
+          status: 'configured',
+        },
+      })),
+    (error: unknown) => {
+      assert.ok(error instanceof AppError);
+      assert.equal(error.code, 'PROJECT_ID_REQUIRED');
+      assert.equal(error.statusCode, 400);
+      return true;
+    },
+  );
+});
+
+test('getProjectTaskMaster throws when project does not exist', async () => {
+  await assert.rejects(
+    async () => getProjectTaskMaster('project-that-does-not-exist', async () => null),
+    (error: unknown) => {
+      assert.ok(error instanceof AppError);
+      assert.equal(error.code, 'PROJECT_NOT_FOUND');
+      assert.equal(error.statusCode, 404);
+      return true;
+    },
+  );
+});
diff --git a/server/modules/providers/index.ts b/server/modules/providers/index.ts
new file mode 100644
index 00000000..28287299
--- /dev/null
+++ b/server/modules/providers/index.ts
@@ -0,0 +1,4 @@
+export { sessionSynchronizerService } from './services/session-synchronizer.service.js';
+
+export { initializeSessionsWatcher } from './services/sessions-watcher.service.js';
+export { closeSessionsWatcher } from './services/sessions-watcher.service.js';
\ No newline at end of file
diff --git a/server/modules/providers/list/claude/claude-session-synchronizer.provider.ts b/server/modules/providers/list/claude/claude-session-synchronizer.provider.ts
new file mode 100644
index 00000000..7d089a2d
--- /dev/null
+++ b/server/modules/providers/list/claude/claude-session-synchronizer.provider.ts
@@ -0,0 +1,110 @@
+import os from 'node:os';
+import path from 'node:path';
+
+import { sessionsDb } from '@/modules/database/index.js';
+import {
+  buildLookupMap,
+  extractFirstValidJsonlData,
+  findFilesRecursivelyCreatedAfter,
+  normalizeSessionName,
+  readFileTimestamps,
+} from '@/shared/utils.js';
+import type { IProviderSessionSynchronizer } from '@/shared/interfaces.js';
+
+type ParsedSession = {
+  sessionId: string;
+  projectPath: string;
+  sessionName?: string;
+};
+
+/**
+ * Session indexer for Claude transcript artifacts.
+ */
+export class ClaudeSessionSynchronizer implements IProviderSessionSynchronizer {
+  private readonly provider = 'claude' as const;
+  private readonly claudeHome = path.join(os.homedir(), '.claude');
+
+  /**
+   * Scans ~/.claude/projects and upserts discovered sessions into DB.
+   */
+  async synchronize(since?: Date): Promise<number> {
+    const nameMap = await buildLookupMap(path.join(this.claudeHome, 'history.jsonl'), 'sessionId', 'display');
+    const files = await findFilesRecursivelyCreatedAfter(
+      path.join(this.claudeHome, 'projects'),
+      '.jsonl',
+      since ?? null
+    );
+
+    let processed = 0;
+    for (const filePath of files) {
+      const parsed = await this.processSessionFile(filePath, nameMap);
+      if (!parsed) {
+        continue;
+      }
+
+      const timestamps = await readFileTimestamps(filePath);
+      sessionsDb.createSession(
+        parsed.sessionId,
+        this.provider,
+        parsed.projectPath,
+        parsed.sessionName,
+        timestamps.createdAt,
+        timestamps.updatedAt,
+        filePath
+      );
+      processed += 1;
+    }
+
+    return processed;
+  }
+
+  /**
+   * Parses and upserts one Claude session JSONL file.
+   */
+  async synchronizeFile(filePath: string): Promise<unknown> {
+    if (!filePath.endsWith('.jsonl')) {
+      return null;
+    }
+
+    const nameMap = await buildLookupMap(path.join(this.claudeHome, 'history.jsonl'), 'sessionId', 'display');
+    const parsed = await this.processSessionFile(filePath, nameMap);
+    if (!parsed) {
+      return null;
+    }
+
+    const timestamps = await readFileTimestamps(filePath);
+    return sessionsDb.createSession(
+      parsed.sessionId,
+      this.provider,
+      parsed.projectPath,
+      parsed.sessionName,
+      timestamps.createdAt,
+      timestamps.updatedAt,
+      filePath
+    );
+  }
+
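+  // Illustrative only: the first parseable line of a Claude session JSONL is
+  // assumed to look roughly like
+  //   {"sessionId":"6f9c...","cwd":"/home/user/my-project","type":"user",...}
+  // processSessionFile() below keeps just sessionId and cwd, and resolves the
+  // display name through the ~/.claude/history.jsonl lookup map.
+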
+  /**
+   * Extracts session metadata from one Claude JSONL session file.
+   */
+  private async processSessionFile(
+    filePath: string,
+    nameMap: Map<string, string>
+  ): Promise<ParsedSession | null> {
+    return extractFirstValidJsonlData(filePath, (rawData) => {
+      const data = rawData as Record<string, unknown>;
+      const sessionId = typeof data.sessionId === 'string' ? data.sessionId : undefined;
+      const projectPath = typeof data.cwd === 'string' ? data.cwd : undefined;
+
+      if (!sessionId || !projectPath) {
+        return null;
+      }
+
+      return {
+        sessionId,
+        projectPath,
+        sessionName: normalizeSessionName(nameMap.get(sessionId), 'Untitled Claude Session'),
+      };
+    });
+  }
+}
diff --git a/server/modules/providers/list/claude/claude-sessions.provider.ts b/server/modules/providers/list/claude/claude-sessions.provider.ts
index 72bbe07e..ffd358f3 100644
--- a/server/modules/providers/list/claude/claude-sessions.provider.ts
+++ b/server/modules/providers/list/claude/claude-sessions.provider.ts
@@ -1,7 +1,12 @@
-import { getSessionMessages } from '@/projects.js';
+import fs from 'node:fs';
+import fsp from 'node:fs/promises';
+import path from 'node:path';
+import readline from 'node:readline';
+
 import type { IProviderSessions } from '@/shared/interfaces.js';
 import type { AnyRecord, FetchHistoryOptions, FetchHistoryResult, NormalizedMessage } from '@/shared/types.js';
 import { createNormalizedMessage, generateMessageId, readObjectRecord } from '@/shared/utils.js';
+import { sessionsDb } from '@/modules/database/index.js';
 
 const PROVIDER = 'claude';
 
@@ -15,17 +20,184 @@ type ClaudeToolResult = {
 type ClaudeHistoryResult =
   | AnyRecord[]
   | {
-    messages?: AnyRecord[];
-    total?: number;
-    hasMore?: boolean;
-  };
+      messages?: AnyRecord[];
+      total?: number;
+      hasMore?: boolean;
+    };
 
-const loadClaudeSessionMessages = getSessionMessages as unknown as (
-  projectName: string,
+type ClaudeHistoryMessagesResult =
+  | AnyRecord[]
+  | {
+      messages: AnyRecord[];
+      total: number;
+      hasMore: boolean;
+      offset?: number;
+      limit?: number | null;
+    };
+
+async function parseAgentTools(filePath: string): Promise<AnyRecord[]> {
+  const tools: AnyRecord[] = [];
+
+  try {
+    const fileStream = fs.createReadStream(filePath);
+    const rl = readline.createInterface({
+      input: fileStream,
+      crlfDelay: Infinity,
+    });
+
+    for await (const line of rl) {
+      if (!line.trim()) {
+        continue;
+      }
+
+      try {
+        const entry = JSON.parse(line) as AnyRecord;
+
+        if (entry.message?.role === 'assistant' && Array.isArray(entry.message?.content)) {
+          for (const part of entry.message.content as AnyRecord[]) {
+            if (part.type === 'tool_use') {
+              tools.push({
+                toolId: part.id,
+                toolName: part.name,
+                toolInput: part.input,
+                timestamp: entry.timestamp,
+              });
+            }
+          }
+        }
+
+        if (entry.message?.role === 'user' && Array.isArray(entry.message?.content)) {
+          for (const part of entry.message.content as AnyRecord[]) {
+            if (part.type !== 'tool_result') {
+              continue;
+            }
+
+            const tool = tools.find((candidate) => candidate.toolId === part.tool_use_id);
+            if (!tool) {
+              continue;
+            }
+
+            tool.toolResult = {
+              content: typeof part.content === 'string'
+                ? part.content
+                : Array.isArray(part.content)
+                  ? part.content
+                    .map((contentPart: AnyRecord) => contentPart?.text || '')
+                    .join('\n')
+                  : JSON.stringify(part.content),
+              isError: Boolean(part.is_error),
+            };
+          }
+        }
+      } catch {
+        // Skip malformed lines that can happen during concurrent writes.
+      }
+    }
+  } catch (error) {
+    const message = error instanceof Error ? error.message : String(error);
+    console.warn(`Error parsing agent file ${filePath}:`, message);
+  }
+
+  return tools;
+}
+
+async function getSessionMessages(
   sessionId: string,
   limit: number | null,
   offset: number,
-) => Promise<ClaudeHistoryResult>;
+): Promise<ClaudeHistoryMessagesResult> {
+  try {
+    const jsonLPath = sessionsDb.getSessionById(sessionId)?.jsonl_path;
+
+    if (!jsonLPath) {
+      return { messages: [], total: 0, hasMore: false };
+    }
+
+    const projectDir = path.dirname(jsonLPath);
+    const files = await fsp.readdir(projectDir);
+    const agentFiles = files.filter((file) => file.endsWith('.jsonl') && file.startsWith('agent-'));
+
+    const messages: AnyRecord[] = [];
+    const agentToolsCache = new Map<string, AnyRecord[]>();
+
+    const fileStream = fs.createReadStream(jsonLPath);
+    const rl = readline.createInterface({
+      input: fileStream,
+      crlfDelay: Infinity,
+    });
+
+    for await (const line of rl) {
+      if (!line.trim()) {
+        continue;
+      }
+
+      try {
+        const entry = JSON.parse(line) as AnyRecord;
+        if (entry.sessionId === sessionId) {
+          messages.push(entry);
+        }
+      } catch {
+        // Skip malformed JSONL lines that can happen during concurrent writes.
+      }
+    }
+
+    const agentIds = new Set<string>();
+    for (const message of messages) {
+      const agentId = message.toolUseResult?.agentId;
+      if (agentId) {
+        agentIds.add(String(agentId));
+      }
+    }
+
+    for (const agentId of agentIds) {
+      const agentFileName = `agent-${agentId}.jsonl`;
+      if (!agentFiles.includes(agentFileName)) {
+        continue;
+      }
+
+      const agentFilePath = path.join(projectDir, agentFileName);
+      const tools = await parseAgentTools(agentFilePath);
+      agentToolsCache.set(agentId, tools);
+    }
+
+    for (const message of messages) {
+      const agentId = message.toolUseResult?.agentId;
+      if (!agentId) {
+        continue;
+      }
+
+      const agentTools = agentToolsCache.get(String(agentId));
+      if (agentTools && agentTools.length > 0) {
+        message.subagentTools = agentTools;
+      }
+    }
+
+    const sortedMessages = messages.sort(
+      (a, b) => new Date(a.timestamp || 0).getTime() - new Date(b.timestamp || 0).getTime(),
+    );
+    const total = sortedMessages.length;
+
+    if (limit === null) {
+      return sortedMessages;
+    }
+
+    const startIndex = Math.max(0, total - offset - limit);
+    const endIndex = total - offset;
+    const paginatedMessages = sortedMessages.slice(startIndex, endIndex);
+    const hasMore = startIndex > 0;
+
+    return {
+      messages: paginatedMessages,
+      total,
+      hasMore,
+      offset,
+      limit,
+    };
+  } catch (error) {
+    console.error(`Error reading messages for session ${sessionId}:`, error);
+    return limit === null ? [] : { messages: [], total: 0, hasMore: false };
+  }
+}
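+
+// Result-shape sketch (mirrors ClaudeHistoryMessagesResult): with a 50-entry
+// transcript, callers get the raw sorted array when limit is null, otherwise
+// a window counted from the END of the transcript:
+//   await getSessionMessages(id, null, 0); // -> AnyRecord[] with all 50 entries
+//   await getSessionMessages(id, 20, 0);   // -> { messages: entries 30..49, total: 50, hasMore: true }
+//   await getSessionMessages(id, 20, 40);  // -> { messages: entries 0..9, total: 50, hasMore: false }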
 
 /**
  * Claude writes internal command and system reminder entries into history.
@@ -238,14 +410,11 @@ export class ClaudeSessionsProvider implements IProviderSessions {
     sessionId: string,
     options: FetchHistoryOptions = {},
   ): Promise<FetchHistoryResult> {
-    const { projectName, limit = null, offset = 0 } = options;
-    if (!projectName) {
-      return { messages: [], total: 0, hasMore: false, offset: 0, limit: null };
-    }
+    const { limit = null, offset = 0 } = options;
 
     let result: ClaudeHistoryResult;
     try {
-      result = await loadClaudeSessionMessages(projectName, sessionId, limit, offset);
+      result = await getSessionMessages(sessionId, limit, offset);
     } catch (error) {
       const message = error instanceof Error ? error.message : String(error);
       console.warn(`[ClaudeProvider] Failed to load session ${sessionId}:`, message);
diff --git a/server/modules/providers/list/claude/claude.provider.ts b/server/modules/providers/list/claude/claude.provider.ts
index 675d82dd..eeec1eb4 100644
--- a/server/modules/providers/list/claude/claude.provider.ts
+++ b/server/modules/providers/list/claude/claude.provider.ts
@@ -1,13 +1,15 @@
 import { AbstractProvider } from '@/modules/providers/shared/base/abstract.provider.js';
 import { ClaudeProviderAuth } from '@/modules/providers/list/claude/claude-auth.provider.js';
 import { ClaudeMcpProvider } from '@/modules/providers/list/claude/claude-mcp.provider.js';
+import { ClaudeSessionSynchronizer } from '@/modules/providers/list/claude/claude-session-synchronizer.provider.js';
 import { ClaudeSessionsProvider } from '@/modules/providers/list/claude/claude-sessions.provider.js';
-import type { IProviderAuth, IProviderSessions } from '@/shared/interfaces.js';
+import type { IProviderAuth, IProviderSessionSynchronizer, IProviderSessions } from '@/shared/interfaces.js';
 
 export class ClaudeProvider extends AbstractProvider {
   readonly mcp = new ClaudeMcpProvider();
   readonly auth: IProviderAuth = new ClaudeProviderAuth();
   readonly sessions: IProviderSessions = new ClaudeSessionsProvider();
+  readonly sessionSynchronizer: IProviderSessionSynchronizer = new ClaudeSessionSynchronizer();
 
   constructor() {
     super('claude');
diff --git a/server/modules/providers/list/codex/codex-session-synchronizer.provider.ts b/server/modules/providers/list/codex/codex-session-synchronizer.provider.ts
new file mode 100644
index 00000000..bd1edc0c
--- /dev/null
+++ b/server/modules/providers/list/codex/codex-session-synchronizer.provider.ts
@@ -0,0 +1,119 @@
+import os from 'node:os';
+import path from 'node:path';
+
+import { sessionsDb } from '@/modules/database/index.js';
+import {
+  buildLookupMap,
+  extractFirstValidJsonlData,
+  findFilesRecursivelyCreatedAfter,
+  normalizeSessionName,
+  readFileTimestamps,
+} from '@/shared/utils.js';
+import type { IProviderSessionSynchronizer } from '@/shared/interfaces.js';
+
+type ParsedSession = {
+  sessionId: string;
+  projectPath: string;
+  sessionName?: string;
+};
+
+/**
+ * Session indexer for Codex transcript artifacts.
+ */
+export class CodexSessionSynchronizer implements IProviderSessionSynchronizer {
+  private readonly provider = 'codex' as const;
+  private readonly codexHome = path.join(os.homedir(), '.codex');
+
+  /**
+   * Scans ~/.codex/sessions and upserts discovered sessions into DB.
+   */
+  async synchronize(since?: Date): Promise<number> {
+    const nameMap = await buildLookupMap(path.join(this.codexHome, 'session_index.jsonl'), 'id', 'thread_name');
+    const files = await findFilesRecursivelyCreatedAfter(
+      path.join(this.codexHome, 'sessions'),
+      '.jsonl',
+      since ?? null
+    );
+
+    let processed = 0;
+    for (const filePath of files) {
+      const parsed = await this.processSessionFile(filePath, nameMap);
+      if (!parsed) {
+        continue;
+      }
+
+      const existingSession = sessionsDb.getSessionById(parsed.sessionId);
+      if (existingSession) {
+        // If the stored name is still the untitled placeholder and we now have a real name, update it.
+        if (existingSession.custom_name === 'Untitled Codex Session' && parsed.sessionName && parsed.sessionName !== 'Untitled Codex Session') {
+          sessionsDb.updateSessionCustomName(parsed.sessionId, parsed.sessionName);
+        }
+      }
+
+      const timestamps = await readFileTimestamps(filePath);
+      sessionsDb.createSession(
+        parsed.sessionId,
+        this.provider,
+        parsed.projectPath,
+        parsed.sessionName,
+        timestamps.createdAt,
+        timestamps.updatedAt,
+        filePath
+      );
+      processed += 1;
+    }
+
+    return processed;
+  }
+
+  /**
+   * Parses and upserts one Codex session JSONL file.
+   */
+  async synchronizeFile(filePath: string): Promise<unknown> {
+    if (!filePath.endsWith('.jsonl')) {
+      return null;
+    }
+
+    const nameMap = await buildLookupMap(path.join(this.codexHome, 'session_index.jsonl'), 'id', 'thread_name');
+    const parsed = await this.processSessionFile(filePath, nameMap);
+    if (!parsed) {
+      return null;
+    }
+
+    const timestamps = await readFileTimestamps(filePath);
+    return sessionsDb.createSession(
+      parsed.sessionId,
+      this.provider,
+      parsed.projectPath,
+      parsed.sessionName,
+      timestamps.createdAt,
+      timestamps.updatedAt,
+      filePath
+    );
+  }
+
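+  // Illustrative only: Codex rollout lines are assumed to nest the session
+  // metadata under a `payload` key, roughly
+  //   {"timestamp":"...","type":"session_meta","payload":{"id":"0199-...","cwd":"/home/user/repo"}}
+  // processSessionFile() below needs only payload.id and payload.cwd; the
+  // display name comes from ~/.codex/session_index.jsonl when available.
+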
+  /**
+   * Extracts session metadata from one Codex JSONL session file.
+   */
+  private async processSessionFile(
+    filePath: string,
+    nameMap: Map<string, string>
+  ): Promise<ParsedSession | null> {
+    return extractFirstValidJsonlData(filePath, (rawData) => {
+      const data = rawData as Record<string, unknown>;
+      const payload = data.payload as Record<string, unknown> | undefined;
+      const sessionId = typeof payload?.id === 'string' ? payload.id : undefined;
+      const projectPath = typeof payload?.cwd === 'string' ? payload.cwd : undefined;
+
+      if (!sessionId || !projectPath) {
+        return null;
+      }
+
+      return {
+        sessionId,
+        projectPath,
+        sessionName: normalizeSessionName(nameMap.get(sessionId), 'Untitled Codex Session'),
+      };
+    });
+  }
+}
diff --git a/server/modules/providers/list/codex/codex-sessions.provider.ts b/server/modules/providers/list/codex/codex-sessions.provider.ts
index 1ea986f7..a7fe8129 100644
--- a/server/modules/providers/list/codex/codex-sessions.provider.ts
+++ b/server/modules/providers/list/codex/codex-sessions.provider.ts
@@ -1,4 +1,7 @@
-import { getCodexSessionMessages } from '@/projects.js';
+import fsSync from 'node:fs';
+import readline from 'node:readline';
+
+import { sessionsDb } from '@/modules/database/index.js';
 import type { IProviderSessions } from '@/shared/interfaces.js';
 import type { AnyRecord, FetchHistoryOptions, FetchHistoryResult, NormalizedMessage } from '@/shared/types.js';
 import { createNormalizedMessage, generateMessageId, readObjectRecord } from '@/shared/utils.js';
@@ -11,14 +14,250 @@ type CodexHistoryResult =
       messages?: AnyRecord[];
       total?: number;
       hasMore?: boolean;
+      offset?: number;
+      limit?: number | null;
       tokenUsage?: unknown;
     };
 
-const loadCodexSessionMessages = getCodexSessionMessages as unknown as (
+function isVisibleCodexUserMessage(payload: AnyRecord | null | undefined): boolean {
+  if (!payload || payload.type !== 'user_message') {
+    return false;
+  }
+
+  if (payload.kind && payload.kind !== 'plain') {
+    return false;
+  }
+
+  return typeof payload.message === 'string' && payload.message.trim().length > 0;
+}
+
+function extractCodexTextContent(content: unknown): string {
+  if (!Array.isArray(content)) {
+    return typeof content === 'string' ? content : '';
+  }
+
+  return content
+    .map((item) => {
+      if (!item || typeof item !== 'object') {
+        return '';
+      }
+
+      const record = item as AnyRecord;
+      if (
+        (record.type === 'input_text' || record.type === 'output_text' || record.type === 'text')
+        && typeof record.text === 'string'
+      ) {
+        return record.text;
+      }
+
+      return '';
+    })
+    .filter(Boolean)
+    .join('\n');
+}
+
+async function getCodexSessionMessages(
   sessionId: string,
-  limit: number | null,
-  offset: number,
-) => Promise<CodexHistoryResult>;
+  limit: number | null = null,
+  offset = 0,
+): Promise<CodexHistoryResult> {
+  try {
+    const sessionFilePath = sessionsDb.getSessionById(sessionId)?.jsonl_path;
+
+    if (!sessionFilePath) {
+      console.warn(`Codex session file not found for session ${sessionId}`);
+      return { messages: [], total: 0, hasMore: false };
+    }
+
+    const messages: AnyRecord[] = [];
+    let tokenUsage: AnyRecord | null = null;
+    const fileStream = fsSync.createReadStream(sessionFilePath);
+    const rl = readline.createInterface({
+      input: fileStream,
+      crlfDelay: Infinity,
+    });
+
+    for await (const line of rl) {
+      if (!line.trim()) {
+        continue;
+      }
+
+      try {
+        const entry = JSON.parse(line) as AnyRecord;
+
+        if (entry.type === 'event_msg' && entry.payload?.type === 'token_count' && entry.payload?.info) {
+          const info = entry.payload.info as AnyRecord;
+          if (info.total_token_usage) {
+            const usage = info.total_token_usage as AnyRecord;
+            tokenUsage = {
+              used: usage.total_tokens || 0,
+              total: info.model_context_window || 200000,
+            };
+          }
+        }
+
+        if (entry.type === 'event_msg' && isVisibleCodexUserMessage(entry.payload as AnyRecord)) {
+          messages.push({
+            type: 'user',
+            timestamp: entry.timestamp,
+            message: {
+              role: 'user',
+              content: entry.payload.message,
+            },
+          });
+        }
+
+        if (
+          entry.type === 'response_item' &&
+          entry.payload?.type === 'message' &&
+          entry.payload.role === 'assistant'
+        ) {
+          const textContent = extractCodexTextContent(entry.payload.content);
+          if (textContent.trim()) {
+            messages.push({
+              type: 'assistant',
+              timestamp: entry.timestamp,
+              message: {
+                role: 'assistant',
+                content: textContent,
+              },
+            });
+          }
+        }
+
+        if (entry.type === 'response_item' && entry.payload?.type === 'reasoning') {
+          const summaryText = Array.isArray(entry.payload.summary)
+            ? entry.payload.summary
+              .map((item: AnyRecord) => item?.text)
+              .filter(Boolean)
+              .join('\n')
+            : '';
+
+          if (summaryText.trim()) {
+            messages.push({
+              type: 'thinking',
+              timestamp: entry.timestamp,
+              message: {
+                role: 'assistant',
+                content: summaryText,
+              },
+            });
+          }
+        }
+
+        if (entry.type === 'response_item' && entry.payload?.type === 'function_call') {
+          let toolName = entry.payload.name;
+          let toolInput = entry.payload.arguments;
+
+          if (toolName === 'shell_command') {
+            toolName = 'Bash';
+            try {
+              const args = JSON.parse(entry.payload.arguments) as AnyRecord;
+              toolInput = JSON.stringify({ command: args.command });
+            } catch {
+              // Keep original arguments when parsing fails.
+            }
+          }
+
+          messages.push({
+            type: 'tool_use',
+            timestamp: entry.timestamp,
+            toolName,
+            toolInput,
+            toolCallId: entry.payload.call_id,
+          });
+        }
+
+        if (entry.type === 'response_item' && entry.payload?.type === 'function_call_output') {
+          messages.push({
+            type: 'tool_result',
+            timestamp: entry.timestamp,
+            toolCallId: entry.payload.call_id,
+            output: entry.payload.output,
+          });
+        }
+
+        if (entry.type === 'response_item' && entry.payload?.type === 'custom_tool_call') {
+          const toolName = entry.payload.name || 'custom_tool';
+          const input = entry.payload.input || '';
+
+          if (toolName === 'apply_patch') {
+            const fileMatch = String(input).match(/\*\*\* Update File: (.+)/);
+            const filePath = fileMatch ? fileMatch[1].trim() : 'unknown';
+            const lines = String(input).split('\n');
+            const oldLines: string[] = [];
+            const newLines: string[] = [];
+
+            for (const lineContent of lines) {
+              if (lineContent.startsWith('-') && !lineContent.startsWith('---')) {
+                oldLines.push(lineContent.slice(1));
+              } else if (lineContent.startsWith('+') && !lineContent.startsWith('+++')) {
+                newLines.push(lineContent.slice(1));
+              }
+            }
+
+            messages.push({
+              type: 'tool_use',
+              timestamp: entry.timestamp,
+              toolName: 'Edit',
+              toolInput: JSON.stringify({
+                file_path: filePath,
+                old_string: oldLines.join('\n'),
+                new_string: newLines.join('\n'),
+              }),
+              toolCallId: entry.payload.call_id,
+            });
+          } else {
+            messages.push({
+              type: 'tool_use',
+              timestamp: entry.timestamp,
+              toolName,
+              toolInput: input,
+              toolCallId: entry.payload.call_id,
+            });
+          }
+        }
+
+        if (entry.type === 'response_item' && entry.payload?.type === 'custom_tool_call_output') {
+          messages.push({
+            type: 'tool_result',
+            timestamp: entry.timestamp,
+            toolCallId: entry.payload.call_id,
+            output: entry.payload.output || '',
+          });
+        }
+      } catch {
+        // Skip malformed lines.
+      }
+    }
+
+    messages.sort(
+      (a, b) => new Date(a.timestamp || 0).getTime() - new Date(b.timestamp || 0).getTime(),
+    );
+    const total = messages.length;
+
+    if (limit !== null) {
+      const startIndex = Math.max(0, total - offset - limit);
+      const endIndex = total - offset;
+      const paginatedMessages = messages.slice(startIndex, endIndex);
+      const hasMore = startIndex > 0;
+
+      return {
+        messages: paginatedMessages,
+        total,
+        hasMore,
+        offset,
+        limit,
+        tokenUsage,
+      };
+    }
+
+    return { messages, tokenUsage };
+  } catch (error) {
+    console.error(`Error reading Codex session messages for ${sessionId}:`, error);
+    return { messages: [], total: 0, hasMore: false };
+  }
+}
 
 export class CodexSessionsProvider implements IProviderSessions {
   /**
@@ -31,6 +270,23 @@
     const ts = raw.timestamp || new Date().toISOString();
     const baseId = raw.uuid || generateMessageId('codex');
 
+    if (raw.type === 'thinking' || raw.isReasoning) {
+      const thinkingContent = typeof raw.message?.content === 'string'
+        ? raw.message.content
+        : '';
+      if (!thinkingContent.trim()) {
+        return [];
+      }
+      return [createNormalizedMessage({
+        id: baseId,
+        sessionId,
+        timestamp: ts,
+        provider: PROVIDER,
+        kind: 'thinking',
+        content: thinkingContent,
+      })];
+    }
+
     if (raw.message?.role === 'user') {
       const content = typeof raw.message.content === 'string'
         ? raw.message.content
@@ -77,17 +333,6 @@
       })];
     }
 
-    if (raw.type === 'thinking' || raw.isReasoning) {
-      return [createNormalizedMessage({
-        id: baseId,
-        sessionId,
-        timestamp: ts,
-        provider: PROVIDER,
-        kind: 'thinking',
-        content: raw.message?.content || '',
-      })];
-    }
-
     if (raw.type === 'tool_use' || raw.toolName) {
       return [createNormalizedMessage({
         id: baseId,
@@ -275,7 +520,7 @@
     let result: CodexHistoryResult;
 
     try {
-      result = await loadCodexSessionMessages(sessionId, limit, offset);
+      result = await getCodexSessionMessages(sessionId, limit, offset);
     } catch (error) {
       const message = error instanceof Error ? error.message : String(error);
       console.warn(`[CodexProvider] Failed to load session ${sessionId}:`, message);
diff --git a/server/modules/providers/list/codex/codex.provider.ts b/server/modules/providers/list/codex/codex.provider.ts
index fe1b9eb5..593297bc 100644
--- a/server/modules/providers/list/codex/codex.provider.ts
+++ b/server/modules/providers/list/codex/codex.provider.ts
@@ -1,13 +1,15 @@
 import { AbstractProvider } from '@/modules/providers/shared/base/abstract.provider.js';
 import { CodexProviderAuth } from '@/modules/providers/list/codex/codex-auth.provider.js';
 import { CodexMcpProvider } from '@/modules/providers/list/codex/codex-mcp.provider.js';
+import { CodexSessionSynchronizer } from '@/modules/providers/list/codex/codex-session-synchronizer.provider.js';
 import { CodexSessionsProvider } from '@/modules/providers/list/codex/codex-sessions.provider.js';
-import type { IProviderAuth, IProviderSessions } from '@/shared/interfaces.js';
+import type { IProviderAuth, IProviderSessionSynchronizer, IProviderSessions } from '@/shared/interfaces.js';
 
 export class CodexProvider extends AbstractProvider {
   readonly mcp = new CodexMcpProvider();
   readonly auth: IProviderAuth = new CodexProviderAuth();
   readonly sessions: IProviderSessions = new CodexSessionsProvider();
+  readonly sessionSynchronizer: IProviderSessionSynchronizer = new CodexSessionSynchronizer();
 
   constructor() {
     super('codex');
diff --git a/server/modules/providers/list/cursor/cursor-session-synchronizer.provider.ts b/server/modules/providers/list/cursor/cursor-session-synchronizer.provider.ts
new file mode 100644
index 00000000..4be02dee
--- /dev/null
+++ b/server/modules/providers/list/cursor/cursor-session-synchronizer.provider.ts
@@ -0,0 +1,176 @@
+import crypto from 'node:crypto';
+import fs from 'node:fs';
+import fsp from 'node:fs/promises';
+import os from 'node:os';
+import path from 'node:path';
+import readline from 'node:readline';
+
+import { sessionsDb } from '@/modules/database/index.js';
+import {
+  extractFirstValidJsonlData,
+  findFilesRecursivelyCreatedAfter,
+  normalizeSessionName,
+  readFileTimestamps,
+} from '@/shared/utils.js';
+import type { IProviderSessionSynchronizer } from '@/shared/interfaces.js';
+
+type ParsedSession = {
+  sessionId: string;
+  projectPath: string;
+  sessionName?: string;
+};
+
+/**
+ * Returns directory entries or an empty list when the folder is missing.
+ */
+async function listDirectoryEntriesSafe(
+  directoryPath: string
+): Promise<fs.Dirent[]> {
+  try {
+    return await fsp.readdir(directoryPath, { withFileTypes: true });
+  } catch {
+    return [];
+  }
+}
+
+/**
+ * Session indexer for Cursor transcript artifacts.
+ */
+export class CursorSessionSynchronizer implements IProviderSessionSynchronizer {
+  private readonly provider = 'cursor' as const;
+  private readonly cursorHome = path.join(os.homedir(), '.cursor');
+
+  /**
+   * Scans Cursor chats and upserts discovered sessions into DB.
+   */
+  async synchronize(since?: Date): Promise<number> {
+    const projectsDir = path.join(this.cursorHome, 'projects');
+    const projectEntries = await listDirectoryEntriesSafe(projectsDir);
+    const seenProjectPaths = new Set<string>();
+
+    let processed = 0;
+    for (const entry of projectEntries) {
+      if (!entry.isDirectory()) {
+        continue;
+      }
+
+      const workerLogPath = path.join(projectsDir, entry.name, 'worker.log');
+      const projectPath = await this.extractProjectPathFromWorkerLog(workerLogPath);
+      if (!projectPath || seenProjectPaths.has(projectPath)) {
+        continue;
+      }
+
+      seenProjectPaths.add(projectPath);
+      const projectHash = this.md5(projectPath);
+      const chatsDir = path.join(this.cursorHome, 'chats', projectHash);
+      const files = await findFilesRecursivelyCreatedAfter(chatsDir, '.jsonl', since ?? null);
+
+      for (const filePath of files) {
+        const parsed = await this.processSessionFile(filePath);
+        if (!parsed) {
+          continue;
+        }
+
+        const timestamps = await readFileTimestamps(filePath);
+        sessionsDb.createSession(
+          parsed.sessionId,
+          this.provider,
+          parsed.projectPath,
+          parsed.sessionName,
+          timestamps.createdAt,
+          timestamps.updatedAt,
+          filePath
+        );
+        processed += 1;
+      }
+    }
+
+    return processed;
+  }
+
+  /**
+   * Parses and upserts one Cursor session JSONL file.
+   */
+  async synchronizeFile(filePath: string): Promise<unknown> {
+    if (!filePath.endsWith('.jsonl')) {
+      return null;
+    }
+
+    const parsed = await this.processSessionFile(filePath);
+    if (!parsed) {
+      return null;
+    }
+
+    const timestamps = await readFileTimestamps(filePath);
+    return sessionsDb.createSession(
+      parsed.sessionId,
+      this.provider,
+      parsed.projectPath,
+      parsed.sessionName,
+      timestamps.createdAt,
+      timestamps.updatedAt,
+      filePath
+    );
+  }
+
+  /**
+   * Produces the same project hash Cursor uses in chat directory names.
+   */
+  private md5(input: string): string {
+    return crypto.createHash('md5').update(input).digest('hex');
+  }
+
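+  // Illustrative: Cursor appears to key chat storage by the md5 hex digest of
+  // the absolute project path, so a project at /home/user/my-app is scanned as
+  //   ~/.cursor/chats/<md5('/home/user/my-app')>/**/<sessionId>.jsonl
+  // The digest only has to match what Cursor writes; it is never reversed.
+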
+  /**
+   * Extracts project path from Cursor worker.log.
+   */
+  private async extractProjectPathFromWorkerLog(filePath: string): Promise<string | null> {
+    try {
+      const fileStream = fs.createReadStream(filePath, { encoding: 'utf8' });
+      const lineReader = readline.createInterface({ input: fileStream, crlfDelay: Infinity });
+
+      for await (const line of lineReader) {
+        const match = line.match(/workspacePath=(.*)$/);
+        const projectPath = match?.[1]?.trim();
+        if (projectPath) {
+          lineReader.close();
+          fileStream.close();
+          return projectPath;
+        }
+      }
+    } catch {
+      // Missing worker logs are valid for partial or incomplete session data.
+    }
+
+    return null;
+  }
+
+  /**
+   * Extracts session metadata from one Cursor JSONL session file.
+   */
+  private async processSessionFile(filePath: string): Promise<ParsedSession | null> {
+    const sessionId = path.basename(filePath, '.jsonl');
+    const grandparentDir = path.dirname(path.dirname(filePath));
+    const workerLogPath = path.join(grandparentDir, 'worker.log');
+    const projectPath = await this.extractProjectPathFromWorkerLog(workerLogPath);
+
+    if (!projectPath) {
+      return null;
+    }
+
+    return extractFirstValidJsonlData(filePath, (rawData) => {
+      const data = rawData as Record<string, any>;
+      if (data.role !== 'user') {
+        return null;
+      }
+
+      const text = typeof data.message?.content?.[0]?.text === 'string' ? data.message.content[0].text : '';
+      const firstLine = text.replace(/<\/?user_query>/g, '').trim().split('\n')[0];
+
+      return {
+        sessionId,
+        projectPath,
+        sessionName: normalizeSessionName(firstLine, 'Untitled Cursor Session'),
+      };
+    });
+  }
+}
diff --git a/server/modules/providers/list/cursor/cursor.provider.ts b/server/modules/providers/list/cursor/cursor.provider.ts
index 7e834a10..72edf80c 100644
--- a/server/modules/providers/list/cursor/cursor.provider.ts
+++ b/server/modules/providers/list/cursor/cursor.provider.ts
@@ -1,13 +1,15 @@
 import { AbstractProvider } from '@/modules/providers/shared/base/abstract.provider.js';
 import { CursorProviderAuth } from '@/modules/providers/list/cursor/cursor-auth.provider.js';
 import { CursorMcpProvider } from '@/modules/providers/list/cursor/cursor-mcp.provider.js';
+import { CursorSessionSynchronizer } from '@/modules/providers/list/cursor/cursor-session-synchronizer.provider.js';
 import { CursorSessionsProvider } from '@/modules/providers/list/cursor/cursor-sessions.provider.js';
-import type { IProviderAuth, IProviderSessions } from '@/shared/interfaces.js';
+import type { IProviderAuth, IProviderSessionSynchronizer, IProviderSessions } from '@/shared/interfaces.js';
 
 export class CursorProvider extends AbstractProvider {
   readonly mcp = new CursorMcpProvider();
   readonly auth: IProviderAuth = new CursorProviderAuth();
   readonly sessions: IProviderSessions = new CursorSessionsProvider();
+  readonly sessionSynchronizer: IProviderSessionSynchronizer = new CursorSessionSynchronizer();
 
   constructor() {
     super('cursor');
diff --git a/server/modules/providers/list/gemini/gemini-session-synchronizer.provider.ts b/server/modules/providers/list/gemini/gemini-session-synchronizer.provider.ts
new file mode 100644
index 00000000..52c62e9b
--- /dev/null
+++ b/server/modules/providers/list/gemini/gemini-session-synchronizer.provider.ts
@@ -0,0 +1,401 @@
+import crypto from 'node:crypto';
+import os from 'node:os';
+import path from 'node:path';
+import { readFile } from 'node:fs/promises';
+
+import { projectsDb, sessionsDb } from '@/modules/database/index.js';
+import {
+  findFilesRecursivelyCreatedAfter,
+  normalizeProjectPath,
+  normalizeSessionName,
+  readFileTimestamps,
+} from '@/shared/utils.js';
+import type { IProviderSessionSynchronizer } from '@/shared/interfaces.js';
+import type { AnyRecord } from '@/shared/types.js';
+
+type ParsedSession = {
+  sessionId: string;
+  projectPath: string;
+  sessionName?: string;
+};
+
+type GeminiJsonlMetadata = {
+  sessionId: string;
+  projectPath?: string;
+  projectHash?: string;
+  firstUserMessage?: string;
+};
+
+/**
+ * Session indexer for Gemini transcript artifacts.
+ */
+export class GeminiSessionSynchronizer implements IProviderSessionSynchronizer {
+  private readonly provider = 'gemini' as const;
+  private readonly geminiHome = path.join(os.homedir(), '.gemini');
+
+  /**
+   * Scans Gemini legacy JSON and new JSONL artifacts and upserts sessions into DB.
+   */
+  async synchronize(since?: Date): Promise<number> {
+    const projectHashLookup = this.buildProjectHashLookup();
+
+    const legacySessionFiles = await findFilesRecursivelyCreatedAfter(
+      path.join(this.geminiHome, 'sessions'),
+      '.json',
+      since ?? null
+    );
+    const legacyTempFiles = await findFilesRecursivelyCreatedAfter(
+      path.join(this.geminiHome, 'tmp'),
+      '.json',
+      since ?? null
+    );
+    const jsonlSessionFiles = await findFilesRecursivelyCreatedAfter(
+      path.join(this.geminiHome, 'sessions'),
+      '.jsonl',
+      since ?? null
+    );
null + ); + const jsonlTempFiles = await findFilesRecursivelyCreatedAfter( + path.join(this.geminiHome, 'tmp'), + '.jsonl', + since ?? null + ); + + // Process legacy JSON first, then JSONL. If both exist for a session id, + // the JSONL artifact becomes the canonical jsonl_path via upsert. + const files = [ + ...legacySessionFiles, + ...legacyTempFiles, + ...jsonlSessionFiles, + ...jsonlTempFiles, + ]; + + let processed = 0; + for (const filePath of files) { + if (this.shouldSkipTempArtifact(filePath)) { + continue; + } + + const parsed = filePath.endsWith('.jsonl') + ? await this.processJsonlSessionFile(filePath, projectHashLookup) + : await this.processLegacySessionFile(filePath); + if (!parsed) { + continue; + } + + const timestamps = await readFileTimestamps(filePath); + sessionsDb.createSession( + parsed.sessionId, + this.provider, + parsed.projectPath, + parsed.sessionName, + timestamps.createdAt, + timestamps.updatedAt, + filePath + ); + processed += 1; + } + + return processed; + } + + /** + * Parses and upserts one Gemini legacy JSON or JSONL artifact. + */ + async synchronizeFile(filePath: string): Promise { + if (!filePath.endsWith('.json') && !filePath.endsWith('.jsonl')) { + return null; + } + + if (this.shouldSkipTempArtifact(filePath)) { + return null; + } + + const parsed = filePath.endsWith('.jsonl') + ? await this.processJsonlSessionFile(filePath, this.buildProjectHashLookup()) + : await this.processLegacySessionFile(filePath); + if (!parsed) { + return null; + } + + const timestamps = await readFileTimestamps(filePath); + return sessionsDb.createSession( + parsed.sessionId, + this.provider, + parsed.projectPath, + parsed.sessionName, + timestamps.createdAt, + timestamps.updatedAt, + filePath + ); + } + + /** + * Extracts session metadata from one Gemini legacy JSON artifact. + */ + private async processLegacySessionFile(filePath: string): Promise { + try { + const content = await readFile(filePath, 'utf8'); + const data = JSON.parse(content) as AnyRecord; + + const sessionId = + typeof data.sessionId === 'string' + ? data.sessionId + : typeof data.id === 'string' + ? data.id + : undefined; + if (!sessionId) { + return null; + } + + const workspaceProjectPath = await this.resolveProjectPathFromChatWorkspace(filePath); + const projectPath = typeof data.projectPath === 'string' && data.projectPath.trim().length > 0 + ? data.projectPath + : workspaceProjectPath; + if (!projectPath) { + return null; + } + + const messages = Array.isArray(data.messages) ? data.messages : []; + const firstMessage = messages[0] as AnyRecord | undefined; + let rawName: string | undefined; + + if (Array.isArray(firstMessage?.content) && typeof firstMessage.content[0]?.text === 'string') { + rawName = firstMessage.content[0].text; + } else if (typeof firstMessage?.content === 'string') { + rawName = firstMessage.content; + } + + return { + sessionId, + projectPath, + sessionName: normalizeSessionName(rawName, 'New Gemini Chat'), + }; + } catch { + return null; + } + } + + /** + * Extracts session metadata from one Gemini JSONL artifact. + */ + private async processJsonlSessionFile( + filePath: string, + projectHashLookup: Map + ): Promise { + const metadata = await this.extractJsonlMetadata(filePath); + if (!metadata) { + return null; + } + + let projectPath = typeof metadata.projectPath === 'string' ? 
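The ordering comment above ("legacy JSON first, then JSONL") carries the correctness argument for the whole scan: createSession must behave as an upsert keyed on session id, so whichever artifact is processed last owns jsonl_path. A toy model of that invariant, with a Map standing in for the real sessions table:

type Row = { sessionId: string; jsonlPath: string };
const sessionsTable = new Map<string, Row>();

// sessionsDb.createSession is assumed to upsert on session_id; Map.set mimics that.
function upsertSession(sessionId: string, jsonlPath: string): Row {
  const row: Row = { sessionId, jsonlPath };
  sessionsTable.set(sessionId, row);
  return row;
}

upsertSession('abc', '/home/me/.gemini/sessions/abc.json');
upsertSession('abc', '/home/me/.gemini/sessions/abc.jsonl');
// sessionsTable.get('abc')!.jsonlPath now ends in .jsonl: the JSONL artifact wins.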
metadata.projectPath.trim() : ''; + if (!projectPath) { + const workspaceProjectPath = await this.resolveProjectPathFromChatWorkspace(filePath); + if (workspaceProjectPath) { + projectPath = workspaceProjectPath; + } + } + if (!projectPath && typeof metadata.projectHash === 'string') { + projectPath = projectHashLookup.get(metadata.projectHash.trim().toLowerCase()) ?? ''; + } + if (!projectPath) { + return null; + } + + // Once we resolve a project hash/path pair, keep it in-memory for this sync run. + if (typeof metadata.projectHash === 'string' && metadata.projectHash.trim()) { + projectHashLookup.set(metadata.projectHash.trim().toLowerCase(), projectPath); + } + + return { + sessionId: metadata.sessionId, + projectPath, + sessionName: normalizeSessionName(metadata.firstUserMessage, 'New Gemini Chat'), + }; + } + + /** + * Reads first useful metadata from Gemini JSONL files. + */ + private async extractJsonlMetadata(filePath: string): Promise { + try { + const content = await readFile(filePath, 'utf8'); + const lines = content.split('\n'); + + let sessionId: string | undefined; + let projectPath: string | undefined; + let projectHash: string | undefined; + let firstUserMessage: string | undefined; + + for (const line of lines) { + const trimmed = line.trim(); + if (!trimmed) { + continue; + } + + let parsed: AnyRecord; + try { + parsed = JSON.parse(trimmed) as AnyRecord; + } catch { + continue; + } + + if (!sessionId && typeof parsed.sessionId === 'string') { + sessionId = parsed.sessionId; + } + if (!projectPath && typeof parsed.projectPath === 'string') { + projectPath = parsed.projectPath; + } + if (!projectHash && typeof parsed.projectHash === 'string') { + projectHash = parsed.projectHash; + } + + if (!firstUserMessage && parsed.type === 'user') { + firstUserMessage = this.extractGeminiTextContent(parsed.content); + } + + if (sessionId && (projectPath || projectHash) && firstUserMessage) { + break; + } + } + + if (!sessionId) { + return null; + } + + return { + sessionId, + projectPath, + projectHash, + firstUserMessage, + }; + } catch { + return null; + } + } + + /** + * Tries to resolve project root from Gemini tmp chat workspaces. + */ + private async resolveProjectPathFromChatWorkspace(filePath: string): Promise { + if (!filePath.includes(`${path.sep}chats${path.sep}`)) { + return ''; + } + + const chatsDir = path.dirname(filePath); + const workspaceDir = path.dirname(chatsDir); + const projectRootPath = path.join(workspaceDir, '.project_root'); + + try { + const rootContent = await readFile(projectRootPath, 'utf8'); + return rootContent.trim(); + } catch { + return ''; + } + } + + /** + * Builds a hash->path lookup for Gemini JSONL metadata that stores projectHash. + */ + private buildProjectHashLookup(): Map { + const lookup = new Map(); + const knownPaths = new Set(); + + for (const project of projectsDb.getProjectPaths()) { + if (typeof project.project_path === 'string' && project.project_path.trim()) { + knownPaths.add(project.project_path.trim()); + } + } + + for (const session of sessionsDb.getAllSessions()) { + if (session.provider === this.provider && typeof session.project_path === 'string' && session.project_path.trim()) { + knownPaths.add(session.project_path.trim()); + } + } + + for (const knownPath of knownPaths) { + this.addProjectHashCandidates(lookup, knownPath); + } + + return lookup; + } + + /** + * Adds likely Gemini hash variants for one project path. 
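projectHash is a one-way digest, so buildProjectHashLookup above inverts it by brute force: hash every project path the database already knows and index the digests for exact lookup, while addProjectHashCandidates widens the key set with normalized, resolved, and (on Windows) lowercased variants. Reduced to its core:

import crypto from 'node:crypto';

function buildReverseHashIndex(knownProjectPaths: string[]): Map<string, string> {
  const index = new Map<string, string>();
  for (const projectPath of knownProjectPaths) {
    const digest = crypto.createHash('sha256').update(projectPath).digest('hex');
    if (!index.has(digest)) index.set(digest, projectPath);
  }
  return index;
}

// index.get(metadata.projectHash) recovers the original path, but only for
// paths the database has already seen; hence the candidate variants above.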
+ */ + private addProjectHashCandidates(lookup: Map, projectPath: string): void { + const trimmed = projectPath.trim(); + if (!trimmed) { + return; + } + + const normalized = normalizeProjectPath(trimmed); + const resolved = path.resolve(trimmed); + const resolvedNormalized = normalizeProjectPath(resolved); + + const candidates = new Set([ + trimmed, + normalized, + resolved, + resolvedNormalized, + ]); + + if (process.platform === 'win32') { + for (const candidate of [...candidates]) { + candidates.add(candidate.toLowerCase()); + } + } + + for (const candidate of candidates) { + if (!candidate) { + continue; + } + + const hash = this.sha256(candidate); + if (!lookup.has(hash)) { + lookup.set(hash, trimmed); + } + } + } + + /** + * Returns first user text from Gemini content payload shapes. + */ + private extractGeminiTextContent(content: unknown): string | undefined { + if (typeof content === 'string' && content.trim().length > 0) { + return content; + } + + if (!Array.isArray(content)) { + return undefined; + } + + for (const part of content) { + if (typeof part === 'string' && part.trim().length > 0) { + return part; + } + + if (part && typeof part === 'object' && typeof (part as AnyRecord).text === 'string') { + const text = (part as AnyRecord).text; + if (text.trim().length > 0) { + return text; + } + } + } + + return undefined; + } + + /** + * Keeps tmp scanning scoped to chat artifacts only. + */ + private shouldSkipTempArtifact(filePath: string): boolean { + return ( + filePath.startsWith(path.join(this.geminiHome, 'tmp')) + && !filePath.includes(`${path.sep}chats${path.sep}`) + ); + } + + private sha256(value: string): string { + return crypto.createHash('sha256').update(value).digest('hex'); + } +} diff --git a/server/modules/providers/list/gemini/gemini-sessions.provider.ts b/server/modules/providers/list/gemini/gemini-sessions.provider.ts index 7d5b5f1a..606a1f17 100644 --- a/server/modules/providers/list/gemini/gemini-sessions.provider.ts +++ b/server/modules/providers/list/gemini/gemini-sessions.provider.ts @@ -1,11 +1,249 @@ -import sessionManager from '@/sessionManager.js'; -import { getGeminiCliSessionMessages } from '@/projects.js'; +import fsSync from 'node:fs'; +import fs from 'node:fs/promises'; +import readline from 'node:readline'; + +import { sessionsDb } from '@/modules/database/index.js'; import type { IProviderSessions } from '@/shared/interfaces.js'; import type { AnyRecord, FetchHistoryOptions, FetchHistoryResult, NormalizedMessage } from '@/shared/types.js'; import { createNormalizedMessage, generateMessageId, readObjectRecord } from '@/shared/utils.js'; const PROVIDER = 'gemini'; +type GeminiHistoryResult = { + messages: AnyRecord[]; + tokenUsage?: unknown; +}; + +function mapGeminiRole(value: unknown): 'user' | 'assistant' | null { + if (value === 'user') { + return 'user'; + } + + if (value === 'gemini' || value === 'assistant') { + return 'assistant'; + } + + return null; +} + +function extractGeminiTextContent(content: unknown): string { + if (typeof content === 'string') { + return content; + } + + if (!Array.isArray(content)) { + return ''; + } + + return content + .map((part) => { + if (typeof part === 'string') { + return part; + } + if (!part || typeof part !== 'object') { + return ''; + } + + const record = part as AnyRecord; + if (typeof record.text === 'string') { + return record.text; + } + + return ''; + }) + .filter(Boolean) + .join('\n'); +} + +function extractGeminiThoughts(thoughts: unknown): string { + if (!Array.isArray(thoughts)) { + 
return ''; + } + + return thoughts + .map((item) => { + if (!item || typeof item !== 'object') { + return ''; + } + + const record = item as AnyRecord; + const subject = typeof record.subject === 'string' ? record.subject.trim() : ''; + const description = typeof record.description === 'string' ? record.description.trim() : ''; + + if (subject && description) { + return `${subject}: ${description}`; + } + + return description || subject; + }) + .filter(Boolean) + .join('\n'); +} + +function buildGeminiTokenUsage(tokens: unknown): AnyRecord | undefined { + if (!tokens || typeof tokens !== 'object') { + return undefined; + } + + const record = tokens as AnyRecord; + const input = Number(record.input || 0); + const output = Number(record.output || 0); + const cached = Number(record.cached || 0); + const thoughts = Number(record.thoughts || 0); + const tool = Number(record.tool || 0); + + const totalFromFields = input + output + cached + thoughts + tool; + const total = Number(record.total || totalFromFields || 0); + + return { + used: total, + total: total, + breakdown: { + input, + output, + cached, + thoughts, + tool, + }, + }; +} + +async function getGeminiLegacySessionMessages(sessionFilePath: string): Promise { + try { + const data = await fs.readFile(sessionFilePath, 'utf8'); + const session = JSON.parse(data) as AnyRecord; + const sourceMessages = Array.isArray(session.messages) ? session.messages : []; + + const messages: AnyRecord[] = []; + for (const msg of sourceMessages) { + const role = mapGeminiRole(msg.type ?? msg.role); + if (!role) { + continue; + } + + messages.push({ + type: 'message', + uuid: typeof msg.id === 'string' ? msg.id : undefined, + message: { role, content: msg.content }, + timestamp: msg.timestamp || null, + }); + } + + return { messages }; + } catch { + return { messages: [] }; + } +} + +async function getGeminiJsonlSessionMessages(sessionFilePath: string): Promise { + const messages: AnyRecord[] = []; + let tokenUsage: AnyRecord | undefined; + + try { + const fileStream = fsSync.createReadStream(sessionFilePath); + const lineReader = readline.createInterface({ + input: fileStream, + crlfDelay: Infinity, + }); + + for await (const line of lineReader) { + const trimmed = line.trim(); + if (!trimmed) { + continue; + } + + let entry: AnyRecord; + try { + entry = JSON.parse(trimmed) as AnyRecord; + } catch { + continue; + } + + // Metadata/update lines (e.g. {$set:{lastUpdated:...}}) do not represent chat messages. + if (entry.$set) { + continue; + } + + const role = mapGeminiRole(entry.type); + if (role) { + const textContent = extractGeminiTextContent(entry.content); + if (textContent.trim()) { + messages.push({ + type: 'message', + uuid: typeof entry.id === 'string' ? entry.id : undefined, + message: { role, content: textContent }, + timestamp: entry.timestamp || null, + }); + } + + const thinkingContent = extractGeminiThoughts(entry.thoughts); + if (thinkingContent.trim()) { + messages.push({ + type: 'thinking', + uuid: typeof entry.id === 'string' ? `${entry.id}_thinking` : undefined, + message: { role: 'assistant', content: thinkingContent }, + timestamp: entry.timestamp || null, + isReasoning: true, + }); + } + + if (role === 'assistant') { + const usage = buildGeminiTokenUsage(entry.tokens); + if (usage) { + tokenUsage = usage; + } + } + + continue; + } + + if (entry.type === 'tool_use') { + messages.push({ + type: 'tool_use', + uuid: typeof entry.id === 'string' ? 
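getGeminiJsonlSessionMessages here repeats a defensive pattern used by every JSONL reader in this changeset: stream the file line by line, skip blank lines, and drop lines that fail JSON.parse instead of aborting, because a live session can tear the trailing line mid-write. Factored out as a sketch:

import fs from 'node:fs';
import readline from 'node:readline';

async function* readJsonlObjects(filePath: string): AsyncGenerator<Record<string, unknown>> {
  const lineReader = readline.createInterface({
    input: fs.createReadStream(filePath, { encoding: 'utf8' }),
    crlfDelay: Infinity, // treat \r\n as a single line break
  });
  for await (const line of lineReader) {
    const trimmed = line.trim();
    if (!trimmed) continue;
    try {
      yield JSON.parse(trimmed) as Record<string, unknown>;
    } catch {
      // tolerate torn or partially flushed lines from in-flight sessions
    }
  }
}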
entry.id : undefined, + timestamp: entry.timestamp || null, + toolName: entry.tool_name || entry.name || 'Tool', + toolInput: entry.parameters ?? entry.input ?? entry.arguments ?? '', + toolCallId: entry.tool_id || entry.toolCallId || entry.id, + }); + continue; + } + + if (entry.type === 'tool_result') { + messages.push({ + type: 'tool_result', + uuid: typeof entry.id === 'string' ? entry.id : undefined, + timestamp: entry.timestamp || null, + toolCallId: entry.tool_id || entry.toolCallId || entry.id || '', + output: entry.output ?? entry.result ?? '', + isError: Boolean(entry.error) || entry.status === 'error', + }); + } + } + } catch { + return { messages: [] }; + } + + messages.sort( + (a, b) => new Date(a.timestamp || 0).getTime() - new Date(b.timestamp || 0).getTime(), + ); + + return { messages, tokenUsage }; +} + +async function getGeminiCliSessionMessages(sessionId: string): Promise { + const sessionFilePath = sessionsDb.getSessionById(sessionId)?.jsonl_path; + if (!sessionFilePath) { + return { messages: [] }; + } + + if (sessionFilePath.endsWith('.jsonl')) { + return getGeminiJsonlSessionMessages(sessionFilePath); + } + + return getGeminiLegacySessionMessages(sessionFilePath); +} + export class GeminiSessionsProvider implements IProviderSessions { /** * Normalizes live Gemini stream-json events into the shared message shape. @@ -108,8 +346,7 @@ export class GeminiSessionsProvider implements IProviderSessions { } /** - * Loads Gemini history from the in-memory session manager first, then falls - * back to Gemini CLI session files on disk. + * Loads Gemini history from Gemini CLI session files on disk. */ async fetchHistory( sessionId: string, @@ -117,28 +354,73 @@ export class GeminiSessionsProvider implements IProviderSessions { ): Promise { const { limit = null, offset = 0 } = options; - let rawMessages: AnyRecord[]; + let result: GeminiHistoryResult; try { - rawMessages = sessionManager.getSessionMessages(sessionId) as AnyRecord[]; - - if (rawMessages.length === 0) { - rawMessages = await getGeminiCliSessionMessages(sessionId) as AnyRecord[]; - } + result = await getGeminiCliSessionMessages(sessionId); } catch (error) { const message = error instanceof Error ? error.message : String(error); console.warn(`[GeminiProvider] Failed to load session ${sessionId}:`, message); return { messages: [], total: 0, hasMore: false, offset: 0, limit: null }; } + const rawMessages = result.messages; const normalized: NormalizedMessage[] = []; + for (let i = 0; i < rawMessages.length; i++) { const raw = rawMessages[i]; const ts = raw.timestamp || new Date().toISOString(); const baseId = raw.uuid || generateMessageId('gemini'); + if (raw.type === 'thinking' || raw.isReasoning) { + const thinkingContent = typeof raw.message?.content === 'string' + ? raw.message.content + : typeof raw.content === 'string' + ? 
raw.content + : ''; + + if (thinkingContent.trim()) { + normalized.push(createNormalizedMessage({ + id: baseId, + sessionId, + timestamp: ts, + provider: PROVIDER, + kind: 'thinking', + content: thinkingContent, + })); + } + continue; + } + + if (raw.type === 'tool_use' || raw.toolName) { + normalized.push(createNormalizedMessage({ + id: baseId, + sessionId, + timestamp: ts, + provider: PROVIDER, + kind: 'tool_use', + toolName: raw.toolName || 'Tool', + toolInput: raw.toolInput, + toolId: raw.toolCallId || baseId, + })); + continue; + } + + if (raw.type === 'tool_result') { + normalized.push(createNormalizedMessage({ + id: baseId, + sessionId, + timestamp: ts, + provider: PROVIDER, + kind: 'tool_result', + toolId: raw.toolCallId || '', + content: raw.output === undefined ? '' : String(raw.output), + isError: Boolean(raw.isError), + })); + continue; + } + const role = raw.message?.role || raw.role; const content = raw.message?.content || raw.content; - if (!role || !content) { continue; } @@ -147,8 +429,26 @@ export class GeminiSessionsProvider implements IProviderSessions { if (Array.isArray(content)) { for (let partIdx = 0; partIdx < content.length; partIdx++) { - const part = content[partIdx]; - if (part.type === 'text' && part.text) { + const part = content[partIdx] as AnyRecord | string; + + if (typeof part === 'string' && part.trim()) { + normalized.push(createNormalizedMessage({ + id: `${baseId}_${partIdx}`, + sessionId, + timestamp: ts, + provider: PROVIDER, + kind: 'text', + role: normalizedRole, + content: part, + })); + continue; + } + + if (!part || typeof part !== 'object') { + continue; + } + + if ((part.type === 'text' || !part.type) && typeof part.text === 'string' && part.text.trim()) { normalized.push(createNormalizedMessage({ id: `${baseId}_${partIdx}`, sessionId, @@ -192,6 +492,19 @@ export class GeminiSessionsProvider implements IProviderSessions { role: normalizedRole, content, })); + } else { + const textContent = extractGeminiTextContent(content); + if (textContent.trim()) { + normalized.push(createNormalizedMessage({ + id: baseId, + sessionId, + timestamp: ts, + provider: PROVIDER, + kind: 'text', + role: normalizedRole, + content: textContent, + })); + } } } @@ -222,6 +535,7 @@ export class GeminiSessionsProvider implements IProviderSessions { hasMore: pageLimit === null ? 
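The return shape being assembled at the end of fetchHistory is the provider-wide paging contract: a null limit means "everything from offset", and hasMore can only be true when a numeric limit left messages unread. In isolation:

function paginate<T>(items: T[], offset = 0, limit: number | null = null) {
  const start = Math.max(0, offset);
  const page = limit === null ? items.slice(start) : items.slice(start, start + limit);
  return {
    messages: page,
    total: items.length,
    hasMore: limit === null ? false : start + limit < items.length,
    offset: start,
    limit,
  };
}

// paginate(['a', 'b', 'c'], 1, 1) -> { messages: ['b'], total: 3, hasMore: true, offset: 1, limit: 1 }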
false : start + pageLimit < normalized.length, offset: start, limit: pageLimit, + tokenUsage: result.tokenUsage, }; } } diff --git a/server/modules/providers/list/gemini/gemini.provider.ts b/server/modules/providers/list/gemini/gemini.provider.ts index d968b7c0..2fb8a7c2 100644 --- a/server/modules/providers/list/gemini/gemini.provider.ts +++ b/server/modules/providers/list/gemini/gemini.provider.ts @@ -1,13 +1,15 @@ import { AbstractProvider } from '@/modules/providers/shared/base/abstract.provider.js'; import { GeminiProviderAuth } from '@/modules/providers/list/gemini/gemini-auth.provider.js'; import { GeminiMcpProvider } from '@/modules/providers/list/gemini/gemini-mcp.provider.js'; +import { GeminiSessionSynchronizer } from '@/modules/providers/list/gemini/gemini-session-synchronizer.provider.js'; import { GeminiSessionsProvider } from '@/modules/providers/list/gemini/gemini-sessions.provider.js'; -import type { IProviderAuth, IProviderSessions } from '@/shared/interfaces.js'; +import type { IProviderAuth, IProviderSessionSynchronizer, IProviderSessions } from '@/shared/interfaces.js'; export class GeminiProvider extends AbstractProvider { readonly mcp = new GeminiMcpProvider(); readonly auth: IProviderAuth = new GeminiProviderAuth(); readonly sessions: IProviderSessions = new GeminiSessionsProvider(); + readonly sessionSynchronizer: IProviderSessionSynchronizer = new GeminiSessionSynchronizer(); constructor() { super('gemini'); diff --git a/server/modules/providers/provider.routes.ts b/server/modules/providers/provider.routes.ts index 895aba84..af6d16d6 100644 --- a/server/modules/providers/provider.routes.ts +++ b/server/modules/providers/provider.routes.ts @@ -2,6 +2,8 @@ import express, { type Request, type Response } from 'express'; import { providerAuthService } from '@/modules/providers/services/provider-auth.service.js'; import { providerMcpService } from '@/modules/providers/services/mcp.service.js'; +import { sessionConversationsSearchService } from '@/modules/providers/services/session-conversations-search.service.js'; +import { sessionsService } from '@/modules/providers/services/sessions.service.js'; import type { LLMProvider, McpScope, McpTransport, UpsertProviderMcpServerInput } from '@/shared/types.js'; import { AppError, asyncHandler, createApiSuccessResponse } from '@/shared/utils.js'; @@ -25,6 +27,20 @@ const readPathParam = (value: unknown, name: string): string => { const normalizeProviderParam = (value: unknown): string => readPathParam(value, 'provider').trim().toLowerCase(); +const SESSION_ID_PATTERN = /^[a-zA-Z0-9._-]{1,120}$/; + +const parseSessionId = (value: unknown): string => { + const sessionId = readPathParam(value, 'sessionId').trim(); + if (!SESSION_ID_PATTERN.test(sessionId)) { + throw new AppError('Invalid sessionId.', { + code: 'INVALID_SESSION_ID', + statusCode: 400, + }); + } + + return sessionId; +}; + const readOptionalQueryString = (value: unknown): string | undefined => { if (typeof value !== 'string') { return undefined; @@ -34,6 +50,29 @@ const readOptionalQueryString = (value: unknown): string | undefined => { return normalized.length > 0 ? 
normalized : undefined; }; +const parseOptionalBooleanQuery = (value: unknown, name: string): boolean | undefined => { + if (value === undefined) { + return undefined; + } + + const normalized = readOptionalQueryString(value); + if (!normalized) { + return undefined; + } + + if (normalized === 'true') { + return true; + } + if (normalized === 'false') { + return false; + } + + throw new AppError(`${name} must be "true" or "false".`, { + code: 'INVALID_QUERY_PARAMETER', + statusCode: 400, + }); +}; + const parseMcpScope = (value: unknown): McpScope | undefined => { if (value === undefined) { return undefined; @@ -103,19 +142,19 @@ const parseMcpUpsertPayload = (payload: unknown): UpsertProviderMcpServerInput = args: Array.isArray(body.args) ? body.args.filter((entry): entry is string => typeof entry === 'string') : undefined, env: typeof body.env === 'object' && body.env !== null ? Object.fromEntries( - Object.entries(body.env as Record).filter( - (entry): entry is [string, string] => typeof entry[1] === 'string', - ), - ) + Object.entries(body.env as Record).filter( + (entry): entry is [string, string] => typeof entry[1] === 'string', + ), + ) : undefined, cwd: readOptionalQueryString(body.cwd), url: readOptionalQueryString(body.url), headers: typeof body.headers === 'object' && body.headers !== null ? Object.fromEntries( - Object.entries(body.headers as Record).filter( - (entry): entry is [string, string] => typeof entry[1] === 'string', - ), - ) + Object.entries(body.headers as Record).filter( + (entry): entry is [string, string] => typeof entry[1] === 'string', + ), + ) : undefined, envVars: Array.isArray(body.envVars) ? body.envVars.filter((entry): entry is string => typeof entry === 'string') @@ -123,10 +162,10 @@ const parseMcpUpsertPayload = (payload: unknown): UpsertProviderMcpServerInput = bearerTokenEnvVar: readOptionalQueryString(body.bearerTokenEnvVar), envHttpHeaders: typeof body.envHttpHeaders === 'object' && body.envHttpHeaders !== null ? Object.fromEntries( - Object.entries(body.envHttpHeaders as Record).filter( - (entry): entry is [string, string] => typeof entry[1] === 'string', - ), - ) + Object.entries(body.envHttpHeaders as Record).filter( + (entry): entry is [string, string] => typeof entry[1] === 'string', + ), + ) : undefined, }; }; @@ -143,6 +182,62 @@ const parseProvider = (value: unknown): LLMProvider => { }); }; +const parseSessionRenameSummary = (payload: unknown): string => { + if (!payload || typeof payload !== 'object') { + throw new AppError('Request body must be an object.', { + code: 'INVALID_REQUEST_BODY', + statusCode: 400, + }); + } + + const body = payload as Record; + const summary = typeof body.summary === 'string' ? body.summary.trim() : ''; + if (!summary) { + throw new AppError('Summary is required.', { + code: 'INVALID_SESSION_SUMMARY', + statusCode: 400, + }); + } + + if (summary.length > 500) { + throw new AppError('Summary must not exceed 500 characters.', { + code: 'INVALID_SESSION_SUMMARY', + statusCode: 400, + }); + } + + return summary; +}; + +const parseSessionSearchQuery = (value: unknown): string => { + const query = readOptionalQueryString(value) ?? 
''; + if (query.length < 2) { + throw new AppError('Query must be at least 2 characters', { + code: 'INVALID_SEARCH_QUERY', + statusCode: 400, + }); + } + + return query; +}; + +const parseSessionSearchLimit = (value: unknown): number => { + const raw = readOptionalQueryString(value); + if (!raw) { + return 50; + } + + const parsed = Number.parseInt(raw, 10); + if (Number.isNaN(parsed)) { + throw new AppError('limit must be a valid integer.', { + code: 'INVALID_QUERY_PARAMETER', + statusCode: 400, + }); + } + + return Math.max(1, Math.min(parsed, 100)); +}; + router.get( '/:provider/auth/status', asyncHandler(async (req: Request, res: Response) => { @@ -152,6 +247,7 @@ router.get( }), ); +// ----------------- MCP routes ----------------- router.get( '/:provider/mcp/servers', asyncHandler(async (req: Request, res: Response) => { @@ -214,4 +310,116 @@ router.post( }), ); +// ----------------- Session routes ----------------- +router.delete( + '/sessions/:sessionId', + asyncHandler(async (req: Request, res: Response) => { + const sessionId = parseSessionId(req.params.sessionId); + const deletedFromDisk = parseOptionalBooleanQuery(req.query.deletedFromDisk, 'deletedFromDisk') ?? false; + const result = await sessionsService.deleteSessionById(sessionId, deletedFromDisk); + res.json(createApiSuccessResponse(result)); + }), +); + +router.put( + '/sessions/:sessionId', + asyncHandler(async (req: Request, res: Response) => { + const sessionId = parseSessionId(req.params.sessionId); + const summary = parseSessionRenameSummary(req.body); + const result = sessionsService.renameSessionById(sessionId, summary); + res.json(createApiSuccessResponse(result)); + }), +); + +router.get( + '/sessions/:sessionId/messages', + asyncHandler(async (req: Request, res: Response) => { + const sessionId = parseSessionId(req.params.sessionId); + const limitRaw = readOptionalQueryString(req.query.limit); + const offsetRaw = readOptionalQueryString(req.query.offset); + + let limit: number | null = null; + if (limitRaw !== undefined) { + const parsedLimit = Number.parseInt(limitRaw, 10); + if (Number.isNaN(parsedLimit) || parsedLimit < 0) { + throw new AppError('limit must be a non-negative integer.', { + code: 'INVALID_QUERY_PARAMETER', + statusCode: 400, + }); + } + limit = parsedLimit; + } + + let offset = 0; + if (offsetRaw !== undefined) { + const parsedOffset = Number.parseInt(offsetRaw, 10); + if (Number.isNaN(parsedOffset) || parsedOffset < 0) { + throw new AppError('offset must be a non-negative integer.', { + code: 'INVALID_QUERY_PARAMETER', + statusCode: 400, + }); + } + offset = parsedOffset; + } + + const result = await sessionsService.fetchHistory(sessionId, { + limit, + offset, + }); + res.json(result); + }), +); + +router.get('/search/sessions', asyncHandler(async (req: Request, res: Response) => { + const query = parseSessionSearchQuery(req.query.q); + const limit = parseSessionSearchLimit(req.query.limit); + + res.writeHead(200, { + 'Content-Type': 'text/event-stream', + 'Cache-Control': 'no-cache', + Connection: 'keep-alive', + 'X-Accel-Buffering': 'no', + }); + + let closed = false; + const abortController = new AbortController(); + req.on('close', () => { + closed = true; + abortController.abort(); + }); + + try { + await sessionConversationsSearchService.search({ + query, + limit, + signal: abortController.signal, + onProgress: ({ projectResult, totalMatches, scannedProjects, totalProjects }) => { + if (closed) { + return; + } + + if (projectResult) { + res.write(`event: result\ndata: 
${JSON.stringify({ projectResult, totalMatches, scannedProjects, totalProjects })}\n\n`); + return; + } + + res.write(`event: progress\ndata: ${JSON.stringify({ totalMatches, scannedProjects, totalProjects })}\n\n`); + }, + }); + + if (!closed) { + res.write('event: done\ndata: {}\n\n'); + } + } catch (error) { + console.error('Error searching conversations:', error); + if (!closed) { + res.write(`event: error\ndata: ${JSON.stringify({ error: 'Search failed' })}\n\n`); + } + } finally { + if (!closed) { + res.end(); + } + } +})); + export default router; diff --git a/server/modules/providers/services/session-conversations-search.service.ts b/server/modules/providers/services/session-conversations-search.service.ts new file mode 100644 index 00000000..afc8bdac --- /dev/null +++ b/server/modules/providers/services/session-conversations-search.service.ts @@ -0,0 +1,1167 @@ +import fsSync, { promises as fs } from 'node:fs'; +import path from 'node:path'; +import readline from 'node:readline'; + +import { spawn } from 'cross-spawn'; +import { rgPath } from '@vscode/ripgrep'; + +import { projectsDb, sessionsDb } from '@/modules/database/index.js'; + +type AnyRecord = Record; +type SearchableProvider = 'claude' | 'codex' | 'gemini'; + +type SearchSnippetHighlight = { + start: number; + end: number; +}; + +type SessionConversationMatch = { + role: string; + snippet: string; + highlights: SearchSnippetHighlight[]; + timestamp: string | null; + provider: SearchableProvider; + messageUuid?: string | null; +}; + +type SessionConversationResult = { + sessionId: string; + provider: SearchableProvider; + sessionSummary: string; + matches: SessionConversationMatch[]; +}; + +type ProjectConversationResult = { + projectId: string | null; + projectName: string; + projectDisplayName: string; + sessions: SessionConversationResult[]; +}; + +export type SessionConversationSearchProgressUpdate = { + projectResult: ProjectConversationResult | null; + totalMatches: number; + scannedProjects: number; + totalProjects: number; +}; + +type SearchSessionConversationsInput = { + query: string; + limit: number; + signal?: AbortSignal; + onProgress?: (update: SessionConversationSearchProgressUpdate) => void; +}; + +type SessionRepositoryRow = ReturnType[number]; +type SearchableSessionRow = SessionRepositoryRow & { + provider: SearchableProvider; + jsonl_path: string; +}; + +type SearchRuntime = { + matchesQuery: (text: string) => boolean; + buildSnippet: (text: string) => { snippet: string; highlights: SearchSnippetHighlight[] }; + limit: number; + totalMatches: number; + isAborted: () => boolean; + matchedSessionKeys: Set; + claudeSessionsByFileKey: Map; + claudeFileResultsCache: Map>; +}; + +type SearchablePathEntry = { + normalizedPath: string; + absolutePath: string; +}; + +type ProjectBucket = { + key: string; + projectId: string | null; + projectName: string; + projectDisplayName: string; + sessions: SearchableSessionRow[]; +}; + +const SUPPORTED_PROVIDERS = new Set(['claude', 'codex', 'gemini']); +const MAX_MATCHES_PER_SESSION = 2; +const RIPGREP_FILE_CHUNK_SIZE = 40; +const RIPGREP_CHUNK_CONCURRENCY = 6; +const UNKNOWN_PROJECT_KEY = '__unknown_project__'; + +const INTERNAL_CONTENT_PREFIXES = [ + '', + '', + '', + '', + '', + 'Caveat:', + 'This session is being continued from a previous', + 'Invalid API key', + '[Request interrupted', +] as const; + +/** + * Codex includes extra internal metadata tags that should not surface as + * user-facing searchable conversation content. 
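Back in the /search/sessions route, every write follows server-sent-events framing: one "event:" line, one "data:" line holding a single JSON document, then a blank line. A small helper shows the shape (the helper name is hypothetical; the route inlines these writes):

import type { Response } from 'express';

// SSE framing: "event: <name>\n" + "data: <json>\n" + "\n".
// JSON.stringify also guarantees the payload contains no raw newlines.
function writeSseEvent(res: Response, event: string, data: unknown): void {
  res.write(`event: ${event}\ndata: ${JSON.stringify(data)}\n\n`);
}

// writeSseEvent(res, 'progress', { totalMatches: 3, scannedProjects: 10, totalProjects: 42 });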
+ */ +const CODEX_INTERNAL_CONTENT_PREFIXES = [ + '', + '', +] as const; + +function normalizeComparablePath(inputPath: string): string { + if (!inputPath || typeof inputPath !== 'string') { + return ''; + } + + const withoutLongPathPrefix = inputPath.startsWith('\\\\?\\') + ? inputPath.slice(4) + : inputPath; + const normalized = path.normalize(withoutLongPathPrefix.trim()); + if (!normalized) { + return ''; + } + + const resolved = path.resolve(normalized); + return process.platform === 'win32' ? resolved.toLowerCase() : resolved; +} + +function chunkArray(items: TItem[], size: number): TItem[][] { + if (size <= 0) { + return [items]; + } + + const chunks: TItem[][] = []; + for (let idx = 0; idx < items.length; idx += size) { + chunks.push(items.slice(idx, idx + size)); + } + return chunks; +} + +function getSessionKey(session: Pick): string { + return `${session.provider}:${session.session_id}`; +} + +function makeProjectKey(projectPath: string | null): string { + const normalized = typeof projectPath === 'string' ? projectPath.trim() : ''; + return normalized.length > 0 ? normalized : UNKNOWN_PROJECT_KEY; +} + +function toSummaryText(customName: string | null, fallback: string | null | undefined, emptyLabel: string): string { + const trimmedCustomName = typeof customName === 'string' ? customName.trim() : ''; + if (trimmedCustomName) { + return trimmedCustomName; + } + + const trimmedFallback = typeof fallback === 'string' ? fallback.trim() : ''; + if (!trimmedFallback) { + return emptyLabel; + } + + return trimmedFallback.length > 50 ? `${trimmedFallback.slice(0, 50)}...` : trimmedFallback; +} + +function isInternalContent(content: string): boolean { + return INTERNAL_CONTENT_PREFIXES.some((prefix) => content.startsWith(prefix)); +} + +function isInternalCodexContent(content: string): boolean { + const normalized = content.trimStart(); + return CODEX_INTERNAL_CONTENT_PREFIXES.some((prefix) => normalized.startsWith(prefix)); +} + +function escapeRegex(value: string): string { + return value.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); +} + +function createWordMatcher( + rawQuery: string, + words: string[], +): Pick { + const normalizedQuery = rawQuery.trim().replace(/\s+/g, ' '); + const requireExactPhrase = words.length > 1 && normalizedQuery.length > 0; + const wordPatterns = words.map((word) => new RegExp(`(? escapeRegex(word)).join('\\s+'); + const phraseRegex = new RegExp(phrasePattern, 'iu'); + + const allWordsMatch = (textLower: string): boolean => + wordPatterns.every((pattern) => pattern.test(textLower)); + + const matchesQuery = (text: string): boolean => { + if (typeof text !== 'string' || text.length === 0) { + return false; + } + + if (requireExactPhrase) { + return phraseRegex.test(text); + } + + if (phraseRegex.test(text)) { + return true; + } + + if (words.length === 1) { + return allWordsMatch(text.toLowerCase()); + } + + return allWordsMatch(text.toLowerCase()); + }; + + const buildSnippet = ( + text: string, + snippetLen = 150, + ): { snippet: string; highlights: SearchSnippetHighlight[] } => { + const textLower = text.toLowerCase(); + let firstIndex = -1; + let firstWordLen = 0; + let phraseStart = -1; + let phraseLength = 0; + + const phraseMatch = phraseRegex.exec(text); + if (phraseMatch) { + phraseStart = phraseMatch.index; + phraseLength = phraseMatch[0].length; + firstIndex = phraseStart; + firstWordLen = phraseLength; + } + + if (firstIndex === -1) { + for (const word of words) { + const regex = new RegExp(`(? 0 ? '...' : ''; + const suffix = end < text.length ? 
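The lookaround bodies inside createWordMatcher appear truncated above (the dangling `(?` fragments). One plausible reconstruction of the intended technique, offered as a sketch rather than the original code: escape the literal word, then require that it is not glued to an adjacent letter or digit, Unicode-aware:

function escapeRegexLiteral(value: string): string {
  return value.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
}

function wordPattern(word: string): RegExp {
  // The lookbehind/lookahead keep 'cat' from matching inside 'concatenate'.
  return new RegExp(`(?<![\\p{L}\\p{N}])${escapeRegexLiteral(word)}(?![\\p{L}\\p{N}])`, 'iu');
}

// wordPattern('cat').test('concatenate') === false
// wordPattern('cat').test('a cat!') === true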
'...' : ''; + const snippetBody = text.slice(start, end).replace(/\n/g, ' '); + const snippet = `${prefix}${snippetBody}${suffix}`; + + const snippetLower = snippet.toLowerCase(); + const highlights: SearchSnippetHighlight[] = []; + + if (phraseStart >= start && phraseStart + phraseLength <= end) { + const phraseOffset = prefix.length + (phraseStart - start); + highlights.push({ + start: phraseOffset, + end: phraseOffset + phraseLength, + }); + } + + if (!requireExactPhrase) { + for (const word of words) { + const regex = new RegExp(`(? left.start - right.start); + const merged: SearchSnippetHighlight[] = []; + for (const highlight of highlights) { + const previous = merged[merged.length - 1]; + if (previous && highlight.start <= previous.end) { + previous.end = Math.max(previous.end, highlight.end); + } else { + merged.push({ ...highlight }); + } + } + + return { snippet, highlights: merged }; + }; + + return { matchesQuery, buildSnippet }; +} + +function extractClaudeText(content: unknown): string { + if (typeof content === 'string') { + return content; + } + + if (!Array.isArray(content)) { + return ''; + } + + return content + .filter((part: AnyRecord) => part?.type === 'text' && typeof part?.text === 'string') + .map((part: AnyRecord) => String(part.text)) + .join(' '); +} + +function extractCodexText(content: unknown): string { + if (typeof content === 'string') { + return content; + } + + if (!Array.isArray(content)) { + return ''; + } + + return content + .map((item) => { + if (!item || typeof item !== 'object') { + return ''; + } + + const record = item as AnyRecord; + if ( + (record.type === 'input_text' || record.type === 'output_text' || record.type === 'text') + && typeof record.text === 'string' + ) { + return record.text; + } + + return ''; + }) + .filter(Boolean) + .join(' '); +} + +function extractGeminiText(content: unknown): string { + if (typeof content === 'string') { + return content; + } + + if (!Array.isArray(content)) { + return ''; + } + + return content + .filter((part: AnyRecord) => typeof part?.text === 'string') + .map((part: AnyRecord) => String(part.text)) + .join(' '); +} + +function normalizeSearchableSessions(rows: SessionRepositoryRow[]): SearchableSessionRow[] { + const normalizedRows: SearchableSessionRow[] = []; + + for (const row of rows) { + const provider = row.provider as SearchableProvider; + if (!SUPPORTED_PROVIDERS.has(provider)) { + continue; + } + + const rawJsonlPath = typeof row.jsonl_path === 'string' ? row.jsonl_path.trim() : ''; + if (!rawJsonlPath) { + continue; + } + + const absoluteJsonlPath = path.resolve(rawJsonlPath); + if (!fsSync.existsSync(absoluteJsonlPath)) { + continue; + } + + normalizedRows.push({ + ...row, + provider, + jsonl_path: absoluteJsonlPath, + }); + } + + return normalizedRows; +} + +function buildProjectBuckets(searchableSessions: SearchableSessionRow[]): ProjectBucket[] { + const projectBuckets = new Map(); + const projectMetadataCache = new Map(); + + for (const session of searchableSessions) { + const key = makeProjectKey(session.project_path); + if (!projectBuckets.has(key)) { + if (!projectMetadataCache.has(key)) { + if (key === UNKNOWN_PROJECT_KEY) { + projectMetadataCache.set(key, { + projectId: null, + projectDisplayName: 'Unknown Project', + }); + } else { + const projectRow = projectsDb.getProjectPath(key); + const customProjectName = typeof projectRow?.custom_project_name === 'string' + ? 
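The highlight post-processing a few lines up is a textbook interval merge: sort by start, then fold any span that begins at or before the previous span's end into one wider span. Isolated, with the same semantics:

type Span = { start: number; end: number };

function mergeSpans(spans: Span[]): Span[] {
  const sorted = [...spans].sort((a, b) => a.start - b.start);
  const merged: Span[] = [];
  for (const span of sorted) {
    const previous = merged[merged.length - 1];
    if (previous && span.start <= previous.end) {
      previous.end = Math.max(previous.end, span.end);
    } else {
      merged.push({ ...span });
    }
  }
  return merged;
}

// mergeSpans([{ start: 0, end: 4 }, { start: 3, end: 9 }, { start: 12, end: 14 }])
// -> [{ start: 0, end: 9 }, { start: 12, end: 14 }]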
projectRow.custom_project_name.trim() + : ''; + const displayName = customProjectName || path.basename(key) || key; + + projectMetadataCache.set(key, { + projectId: projectRow?.project_id ?? null, + projectDisplayName: displayName, + }); + } + } + + const metadata = projectMetadataCache.get(key) as { projectId: string | null; projectDisplayName: string }; + projectBuckets.set(key, { + key, + projectId: metadata.projectId, + projectName: key, + projectDisplayName: metadata.projectDisplayName, + sessions: [], + }); + } + + const bucket = projectBuckets.get(key) as ProjectBucket; + bucket.sessions.push(session); + } + + const buckets = Array.from(projectBuckets.values()); + for (const bucket of buckets) { + bucket.sessions.sort((left, right) => { + const leftTs = new Date(left.updated_at || left.created_at || 0).getTime(); + const rightTs = new Date(right.updated_at || right.created_at || 0).getTime(); + return rightTs - leftTs; + }); + } + + return buckets; +} + +/** + * Executes ripgrep with the file list explicitly provided from sessionsDb jsonl paths. + * + * This avoids recursive directory walks and uses a fixed known candidate list. + */ +async function runRipgrepFilesWithMatches( + pattern: string, + filePaths: string[], + signal?: AbortSignal, +): Promise> { + if (!pattern || filePaths.length === 0 || signal?.aborted) { + return new Set(); + } + + return new Promise((resolve, reject) => { + const args = [ + '--files-with-matches', + '--no-messages', + '--ignore-case', + '--fixed-strings', + '--', + pattern, + ...filePaths, + ]; + const rg = spawn(rgPath, args, { + stdio: ['ignore', 'pipe', 'pipe'], + windowsHide: true, + }); + + const stdoutChunks: Buffer[] = []; + const stderrChunks: Buffer[] = []; + let aborted = false; + + const abortListener = () => { + aborted = true; + rg.kill(); + }; + + if (signal) { + signal.addEventListener('abort', abortListener, { once: true }); + } + + rg.stdout.on('data', (chunk: Buffer) => { + stdoutChunks.push(chunk); + }); + + rg.stderr.on('data', (chunk: Buffer) => { + stderrChunks.push(chunk); + }); + + rg.on('error', (error) => { + if (signal) { + signal.removeEventListener('abort', abortListener); + } + + if (aborted || signal?.aborted) { + resolve(new Set()); + return; + } + + reject(error); + }); + + rg.on('close', (code) => { + if (signal) { + signal.removeEventListener('abort', abortListener); + } + + if (aborted || signal?.aborted) { + resolve(new Set()); + return; + } + + if (code !== 0 && code !== 1) { + const stderr = Buffer.concat(stderrChunks).toString('utf8').trim(); + reject(new Error(`ripgrep failed with code ${String(code)}: ${stderr}`)); + return; + } + + const stdout = Buffer.concat(stdoutChunks).toString('utf8'); + const matchedPaths = new Set(); + + for (const line of stdout.split(/\r?\n/)) { + const trimmed = line.trim(); + if (!trimmed) { + continue; + } + + matchedPaths.add(normalizeComparablePath(trimmed)); + } + + resolve(matchedPaths); + }); + }); +} + +async function findMatchedFileKeys( + searchablePathEntries: SearchablePathEntry[], + rawQuery: string, + words: string[], + signal?: AbortSignal, +): Promise> { + if (searchablePathEntries.length === 0 || words.length === 0 || signal?.aborted) { + return new Set(); + } + + const normalizedQuery = rawQuery.trim().replace(/\s+/g, ' '); + const requireExactPhrase = words.length > 1 && normalizedQuery.length > 0; + + if (requireExactPhrase) { + let matchedForPhrase = searchablePathEntries.slice(); + + // Keep ripgrep as an over-approximation for exact phrase mode by requiring + 
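runRipgrepFilesWithMatches above never lets ripgrep walk directories: the candidate list comes straight from sessionsDb, and only matching file names come back. The essential invocation, reduced (plain node:child_process here instead of cross-spawn):

import { spawn } from 'node:child_process';
import { rgPath } from '@vscode/ripgrep';

function rgFilesWithMatches(pattern: string, files: string[]): Promise<string[]> {
  return new Promise((resolve, reject) => {
    const rg = spawn(
      rgPath,
      ['--files-with-matches', '--no-messages', '--ignore-case', '--fixed-strings', '--', pattern, ...files],
      { windowsHide: true },
    );
    const chunks: Buffer[] = [];
    rg.stdout.on('data', (chunk: Buffer) => chunks.push(chunk));
    rg.on('error', reject);
    rg.on('close', (code) => {
      // ripgrep exits 0 when something matched and 1 when nothing did;
      // only other codes signal a real failure.
      if (code !== 0 && code !== 1) return reject(new Error(`rg exited with ${String(code)}`));
      resolve(Buffer.concat(chunks).toString('utf8').split(/\r?\n/).filter(Boolean));
    });
  });
}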
// each word to appear somewhere in the file, then defer strict phrase + // validation to the in-memory matcher. + for (const word of words) { + if (signal?.aborted) { + return new Set(); + } + + const matchedForWord = new Set(); + const fileChunks = chunkArray( + matchedForPhrase.map((entry) => entry.absolutePath), + RIPGREP_FILE_CHUNK_SIZE, + ); + + let nextChunkIndex = 0; + const workerCount = Math.min(RIPGREP_CHUNK_CONCURRENCY, fileChunks.length); + const workers = Array.from({ length: workerCount }, async () => { + while (nextChunkIndex < fileChunks.length && !signal?.aborted) { + const currentIndex = nextChunkIndex; + nextChunkIndex += 1; + const chunkMatches = await runRipgrepFilesWithMatches(word, fileChunks[currentIndex], signal); + for (const matchedPath of chunkMatches) { + matchedForWord.add(matchedPath); + } + } + }); + + await Promise.all(workers); + if (signal?.aborted) { + return new Set(); + } + + matchedForPhrase = matchedForPhrase.filter((entry) => matchedForWord.has(entry.normalizedPath)); + if (matchedForPhrase.length === 0) { + break; + } + } + + return new Set(matchedForPhrase.map((entry) => entry.normalizedPath)); + } + + let remainingEntries = searchablePathEntries.slice(); + + // Run one ripgrep pass per term and intersect by keeping only files that + // matched every query word. + for (const word of words) { + if (signal?.aborted) { + return new Set(); + } + + const matchedForWord = new Set(); + const fileChunks = chunkArray( + remainingEntries.map((entry) => entry.absolutePath), + RIPGREP_FILE_CHUNK_SIZE, + ); + + let nextChunkIndex = 0; + const workerCount = Math.min(RIPGREP_CHUNK_CONCURRENCY, fileChunks.length); + + const workers = Array.from({ length: workerCount }, async () => { + while (nextChunkIndex < fileChunks.length && !signal?.aborted) { + const currentIndex = nextChunkIndex; + nextChunkIndex += 1; + const chunkMatches = await runRipgrepFilesWithMatches(word, fileChunks[currentIndex], signal); + for (const matchedPath of chunkMatches) { + matchedForWord.add(matchedPath); + } + } + }); + + await Promise.all(workers); + if (signal?.aborted) { + return new Set(); + } + + remainingEntries = remainingEntries.filter((entry) => matchedForWord.has(entry.normalizedPath)); + if (remainingEntries.length === 0) { + break; + } + } + + return new Set(remainingEntries.map((entry) => entry.normalizedPath)); +} + +function addSessionMatch( + runtime: SearchRuntime, + matches: SessionConversationMatch[], + match: SessionConversationMatch, +): void { + if (runtime.totalMatches >= runtime.limit || matches.length >= MAX_MATCHES_PER_SESSION) { + return; + } + + matches.push(match); + runtime.totalMatches += 1; +} + +async function parseClaudeSessionMatches( + session: SearchableSessionRow, + runtime: SearchRuntime, +): Promise { + const fileKey = normalizeComparablePath(session.jsonl_path); + if (!fileKey) { + return null; + } + + if (!runtime.claudeFileResultsCache.has(fileKey)) { + const sessionsForFile = runtime.claudeSessionsByFileKey.get(fileKey) || []; + const matchedSessionsForFile = sessionsForFile.filter((candidate) => + runtime.matchedSessionKeys.has(getSessionKey(candidate)), + ); + + const targetSessions = matchedSessionsForFile.length > 0 + ? matchedSessionsForFile + : [session]; + + const targetSessionIds = new Set(targetSessions.map((candidate) => candidate.session_id)); + const customNameBySessionId = new Map(); + for (const candidate of targetSessions) { + customNameBySessionId.set(candidate.session_id, candidate.custom_name ?? 
null); + } + + type ClaudeSessionSearchState = { + matches: SessionConversationMatch[]; + pendingSummaries: Map; + fallbackUserText: string | null; + fallbackAssistantText: string | null; + resolvedSummary: string | null; + }; + + const sessionStateById = new Map(); + const getSessionState = (sessionId: string): ClaudeSessionSearchState => { + if (!sessionStateById.has(sessionId)) { + sessionStateById.set(sessionId, { + matches: [], + pendingSummaries: new Map(), + fallbackUserText: null, + fallbackAssistantText: null, + resolvedSummary: null, + }); + } + return sessionStateById.get(sessionId) as ClaudeSessionSearchState; + }; + + let currentSessionId: string | null = null; + + try { + const fileStream = fsSync.createReadStream(session.jsonl_path); + const rl = readline.createInterface({ input: fileStream, crlfDelay: Infinity }); + + for await (const line of rl) { + if (runtime.totalMatches >= runtime.limit || runtime.isAborted()) { + break; + } + if (!line.trim()) { + continue; + } + + let entry: AnyRecord; + try { + entry = JSON.parse(line) as AnyRecord; + } catch { + continue; + } + + if (entry.sessionId) { + currentSessionId = String(entry.sessionId); + } + const entrySessionId = entry.sessionId + ? String(entry.sessionId) + : currentSessionId; + if (!entrySessionId || !targetSessionIds.has(entrySessionId)) { + continue; + } + + const state = getSessionState(entrySessionId); + + if (entry.type === 'summary' && entry.summary) { + const summaryValue = String(entry.summary); + if (entry.sessionId) { + state.resolvedSummary = summaryValue; + } else if (entry.leafUuid) { + state.pendingSummaries.set(String(entry.leafUuid), summaryValue); + } + } + + if (!state.resolvedSummary && entry.parentUuid) { + const pendingSummary = state.pendingSummaries.get(String(entry.parentUuid)); + if (pendingSummary) { + state.resolvedSummary = pendingSummary; + } + } + + if (!entry.message?.content || entry.isApiErrorMessage) { + continue; + } + + const role = entry.message.role; + if (role !== 'user' && role !== 'assistant') { + continue; + } + + const text = extractClaudeText(entry.message.content); + if (!text || isInternalContent(text)) { + continue; + } + + if (role === 'user') { + state.fallbackUserText = text; + } else { + state.fallbackAssistantText = text; + } + + if (!runtime.matchesQuery(text)) { + continue; + } + + const { snippet, highlights } = runtime.buildSnippet(text); + addSessionMatch(runtime, state.matches, { + role, + snippet, + highlights, + timestamp: entry.timestamp ? String(entry.timestamp) : null, + provider: 'claude', + messageUuid: entry.uuid ? String(entry.uuid) : null, + }); + } + } catch { + runtime.claudeFileResultsCache.set(fileKey, new Map()); + return null; + } + + const fileResults = new Map(); + for (const [sessionId, state] of sessionStateById.entries()) { + if (state.matches.length === 0) { + continue; + } + + fileResults.set(sessionId, { + sessionId, + provider: 'claude', + sessionSummary: toSummaryText( + customNameBySessionId.get(sessionId) ?? null, + state.resolvedSummary || state.fallbackUserText || state.fallbackAssistantText, + 'New Session', + ), + matches: state.matches, + }); + } + + runtime.claudeFileResultsCache.set(fileKey, fileResults); + } + + return runtime.claudeFileResultsCache.get(fileKey)?.get(session.session_id) ?? 
null; +} + +function isVisibleCodexUserMessage(payload: AnyRecord | null | undefined): boolean { + if (!payload || payload.type !== 'user_message') { + return false; + } + + if (payload.kind && payload.kind !== 'plain') { + return false; + } + + return typeof payload.message === 'string' && payload.message.trim().length > 0; +} + +async function parseCodexSessionMatches( + session: SearchableSessionRow, + runtime: SearchRuntime, +): Promise { + const matches: SessionConversationMatch[] = []; + let latestUserMessageText: string | null = null; + const seenMessageFingerprints = new Set(); + + try { + const fileStream = fsSync.createReadStream(session.jsonl_path); + const rl = readline.createInterface({ input: fileStream, crlfDelay: Infinity }); + + for await (const line of rl) { + if (runtime.totalMatches >= runtime.limit || runtime.isAborted()) { + break; + } + if (!line.trim()) { + continue; + } + + let entry: AnyRecord; + try { + entry = JSON.parse(line) as AnyRecord; + } catch { + continue; + } + + let text: string | null = null; + let role: 'user' | 'assistant' | null = null; + + if (entry.type === 'event_msg' && isVisibleCodexUserMessage(entry.payload as AnyRecord)) { + text = String(entry.payload.message); + role = 'user'; + } else if ( + entry.type === 'event_msg' + && entry.payload?.type === 'agent_reasoning' + && typeof entry.payload?.text === 'string' + ) { + text = String(entry.payload.text); + role = 'assistant'; + } else if (entry.type === 'response_item' && entry.payload?.type === 'message') { + const payload = entry.payload as AnyRecord; + if (payload.role === 'user') { + text = extractCodexText(payload.content); + role = 'user'; + } else if (payload.role === 'assistant') { + text = extractCodexText(payload.content); + role = 'assistant'; + } + } else if (entry.type === 'response_item' && entry.payload?.type === 'reasoning') { + const summaryText = Array.isArray(entry.payload.summary) + ? entry.payload.summary + .map((item: AnyRecord) => (typeof item?.text === 'string' ? item.text : '')) + .filter(Boolean) + .join('\n') + : ''; + + if (summaryText.trim()) { + text = summaryText; + role = 'assistant'; + } + } + + if (!text || !role) { + continue; + } + if (isInternalCodexContent(text)) { + continue; + } + if (role === 'user') { + latestUserMessageText = text; + } + + const fingerprint = `${role}:${text.trim().toLowerCase()}`; + if (seenMessageFingerprints.has(fingerprint)) { + continue; + } + seenMessageFingerprints.add(fingerprint); + + if (!runtime.matchesQuery(text)) { + continue; + } + + const { snippet, highlights } = runtime.buildSnippet(text); + addSessionMatch(runtime, matches, { + role, + snippet, + highlights, + timestamp: entry.timestamp ? String(entry.timestamp) : null, + provider: 'codex', + }); + } + } catch { + return null; + } + + if (matches.length === 0) { + return null; + } + + return { + sessionId: session.session_id, + provider: 'codex', + sessionSummary: toSummaryText(session.custom_name, latestUserMessageText, 'Codex Session'), + matches, + }; +} + +async function parseGeminiSessionMatches( + session: SearchableSessionRow, + runtime: SearchRuntime, +): Promise { + let data: string; + try { + data = await fs.readFile(session.jsonl_path, 'utf8'); + } catch { + return null; + } + + let parsed: AnyRecord; + try { + parsed = JSON.parse(data) as AnyRecord; + } catch { + return null; + } + + const sourceMessages = Array.isArray(parsed.messages) ? 
parsed.messages as AnyRecord[] : []; + if (sourceMessages.length === 0) { + return null; + } + + const matches: SessionConversationMatch[] = []; + let firstUserText: string | null = null; + + for (const msg of sourceMessages) { + if (runtime.totalMatches >= runtime.limit || runtime.isAborted()) { + break; + } + + const role = msg.type === 'user' + ? 'user' + : (msg.type === 'gemini' || msg.type === 'assistant') + ? 'assistant' + : null; + if (!role) { + continue; + } + + const text = extractGeminiText(msg.content); + if (!text) { + continue; + } + + if (role === 'user' && !firstUserText) { + firstUserText = text; + } + + if (!runtime.matchesQuery(text)) { + continue; + } + + const { snippet, highlights } = runtime.buildSnippet(text); + addSessionMatch(runtime, matches, { + role, + snippet, + highlights, + timestamp: msg.timestamp ? String(msg.timestamp) : null, + provider: 'gemini', + }); + } + + if (matches.length === 0) { + return null; + } + + return { + sessionId: session.session_id, + provider: 'gemini', + sessionSummary: toSummaryText(session.custom_name, firstUserText, 'Gemini Session'), + matches, + }; +} + +async function parseSessionMatches( + session: SearchableSessionRow, + runtime: SearchRuntime, +): Promise { + if (session.provider === 'claude') { + return parseClaudeSessionMatches(session, runtime); + } + if (session.provider === 'codex') { + return parseCodexSessionMatches(session, runtime); + } + return parseGeminiSessionMatches(session, runtime); +} + +export async function searchConversations( + query: string, + limit = 50, + onProjectResult: ((update: SessionConversationSearchProgressUpdate) => void) | null = null, + signal: AbortSignal | null = null, +): Promise<{ results: ProjectConversationResult[]; totalMatches: number; query: string }> { + const safeQuery = typeof query === 'string' ? query.trim() : ''; + const safeLimit = Math.max(1, Math.min(Number.isFinite(limit) ? limit : 50, 200)); + const words = safeQuery.toLowerCase().split(/\s+/).filter((word) => word.length > 0); + + if (words.length === 0) { + return { results: [], totalMatches: 0, query: safeQuery }; + } + + const isAborted = () => signal?.aborted === true; + if (isAborted()) { + return { results: [], totalMatches: 0, query: safeQuery }; + } + + const searchableSessions = normalizeSearchableSessions(sessionsDb.getAllSessions()); + if (searchableSessions.length === 0) { + return { results: [], totalMatches: 0, query: safeQuery }; + } + + const sessionsByPathKey = new Map(); + const searchablePathEntries: SearchablePathEntry[] = []; + + for (const session of searchableSessions) { + const normalizedPath = normalizeComparablePath(session.jsonl_path); + if (!normalizedPath) { + continue; + } + + if (!sessionsByPathKey.has(normalizedPath)) { + sessionsByPathKey.set(normalizedPath, []); + searchablePathEntries.push({ + normalizedPath, + absolutePath: session.jsonl_path, + }); + } + + const pathSessions = sessionsByPathKey.get(normalizedPath) as SearchableSessionRow[]; + pathSessions.push(session); + } + + const matchedFileKeys = await findMatchedFileKeys( + searchablePathEntries, + safeQuery, + words, + signal ?? 
undefined, + ); + if (isAborted() || matchedFileKeys.size === 0) { + return { results: [], totalMatches: 0, query: safeQuery }; + } + + const matchedSessionKeys = new Set(); + for (const fileKey of matchedFileKeys) { + const sessions = sessionsByPathKey.get(fileKey); + if (!sessions) { + continue; + } + + for (const session of sessions) { + matchedSessionKeys.add(getSessionKey(session)); + } + } + + const projectBuckets = buildProjectBuckets(searchableSessions); + const totalProjects = projectBuckets.length; + const results: ProjectConversationResult[] = []; + let scannedProjects = 0; + + const runtime: SearchRuntime = { + ...createWordMatcher(safeQuery, words), + limit: safeLimit, + totalMatches: 0, + isAborted, + matchedSessionKeys, + claudeSessionsByFileKey: new Map(), + claudeFileResultsCache: new Map>(), + }; + + for (const [fileKey, sessions] of sessionsByPathKey.entries()) { + const claudeSessions = sessions.filter((session) => session.provider === 'claude'); + if (claudeSessions.length > 0) { + runtime.claudeSessionsByFileKey.set(fileKey, claudeSessions); + } + } + + for (const bucket of projectBuckets) { + if (runtime.totalMatches >= runtime.limit || runtime.isAborted()) { + break; + } + + const projectResult: ProjectConversationResult = { + projectId: bucket.projectId, + projectName: bucket.projectName, + projectDisplayName: bucket.projectDisplayName, + sessions: [], + }; + + for (const session of bucket.sessions) { + if (runtime.totalMatches >= runtime.limit || runtime.isAborted()) { + break; + } + if (!matchedSessionKeys.has(getSessionKey(session))) { + continue; + } + + const sessionResult = await parseSessionMatches(session, runtime); + if (sessionResult) { + projectResult.sessions.push(sessionResult); + } + } + + scannedProjects += 1; + if (projectResult.sessions.length > 0) { + results.push(projectResult); + onProjectResult?.({ + projectResult, + totalMatches: runtime.totalMatches, + scannedProjects, + totalProjects, + }); + } else if (onProjectResult && scannedProjects % 10 === 0) { + onProjectResult({ + projectResult: null, + totalMatches: runtime.totalMatches, + scannedProjects, + totalProjects, + }); + } + } + + return { + results, + totalMatches: runtime.totalMatches, + query: safeQuery, + }; +} + +/** + * Application service for session-conversation search. + * + * Provider routes call this service so route handlers stay focused on + * request parsing/response formatting, while search execution remains + * centralized in one place. + */ +export const sessionConversationsSearchService = { + /** + * Streams progress updates while the search scans provider session logs. + */ + async search(input: SearchSessionConversationsInput): Promise { + await searchConversations( + input.query, + input.limit, + input.onProgress ?? null, + input.signal ?? null, + ); + }, +}; diff --git a/server/modules/providers/services/session-synchronizer.service.ts b/server/modules/providers/services/session-synchronizer.service.ts new file mode 100644 index 00000000..47c37091 --- /dev/null +++ b/server/modules/providers/services/session-synchronizer.service.ts @@ -0,0 +1,74 @@ +import { scanStateDb } from '@/modules/database/index.js'; +import { providerRegistry } from '@/modules/providers/provider.registry.js'; +import type { LLMProvider } from '@/shared/types.js'; + +type SessionSynchronizeResult = { + processedByProvider: Record; + failures: string[]; +}; + +/** + * Orchestrates provider-specific session indexers and indexed-session lifecycle operations. 
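The service body that follows fans out with Promise.allSettled, so one provider's rejection cannot short-circuit the other synchronizers, and the scan cursor advances only after a fully clean run. The skeleton of that pattern:

type ProviderSync = { id: string; run: () => Promise<number> };

async function syncAll(providers: ProviderSync[]) {
  const settled = await Promise.allSettled(
    providers.map(async (p) => ({ id: p.id, count: await p.run() })),
  );
  const processed: Record<string, number> = {};
  const failures: string[] = [];
  for (const result of settled) {
    if (result.status === 'fulfilled') processed[result.value.id] = result.value.count;
    else failures.push(result.reason instanceof Error ? result.reason.message : String(result.reason));
  }
  // Callers advance the last-scanned cursor only when failures.length === 0,
  // so a failed provider is retried from the same boundary on the next run.
  return { processed, failures };
}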
+ */ +export const sessionSynchronizerService = { + /** + * Runs all provider synchronizers and updates scan_state.last_scanned_at. + */ + async synchronizeSessions(): Promise<SessionSynchronizeResult> { + const lastScanAt = scanStateDb.getLastScannedAt(); + const scanBoundary = new Date(); + const processedByProvider: Record<LLMProvider, number> = { + claude: 0, + codex: 0, + cursor: 0, + gemini: 0, + }; + const failures: string[] = []; + + const results = await Promise.allSettled( + providerRegistry.listProviders().map(async (provider) => ({ + provider: provider.id, + processed: await provider.sessionSynchronizer.synchronize(lastScanAt ?? undefined), + })) + ); + + for (const result of results) { + if (result.status === 'fulfilled') { + processedByProvider[result.value.provider] = result.value.processed; + continue; + } + + const reason = result.reason instanceof Error ? result.reason.message : String(result.reason); + failures.push(reason); + } + + if (failures.length === 0) { + scanStateDb.updateLastScannedAt(scanBoundary); + } else { + console.warn( + `[Sessions] Skipping scan_state cursor advance because ${failures.length} provider sync(s) failed.`, + ); + } + + return { + processedByProvider, + failures, + }; + }, + + /** + * Indexes one provider artifact file without running a full provider rescan. + */ + async synchronizeProviderFile( + provider: LLMProvider, + filePath: string + ): Promise<{ provider: LLMProvider; indexed: boolean; sessionId: string | null }> { + const resolvedProvider = providerRegistry.resolveProvider(provider); + const sessionId = await resolvedProvider.sessionSynchronizer.synchronizeFile(filePath); + return { + provider, + indexed: Boolean(sessionId), + sessionId, + }; + }, +}; diff --git a/server/modules/providers/services/sessions-watcher.service.ts b/server/modules/providers/services/sessions-watcher.service.ts new file mode 100644 index 00000000..3a7348ed --- /dev/null +++ b/server/modules/providers/services/sessions-watcher.service.ts @@ -0,0 +1,283 @@ +import os from 'node:os'; +import path from 'node:path'; +import { promises as fsPromises } from 'node:fs'; + +import chokidar, { type FSWatcher } from 'chokidar'; + +import { sessionSynchronizerService } from '@/modules/providers/services/session-synchronizer.service.js'; +import { WS_OPEN_STATE, connectedClients } from '@/modules/websocket/index.js'; +import type { LLMProvider } from '@/shared/types.js'; +import { getProjectsWithSessions } from '@/modules/projects/index.js'; + +type WatcherEventType = 'add' | 'change'; + +const PROVIDER_WATCH_PATHS: Array<{ provider: LLMProvider; rootPath: string }> = [ + { + provider: 'claude', + rootPath: path.join(os.homedir(), '.claude', 'projects'), + }, + { + provider: 'cursor', + rootPath: path.join(os.homedir(), '.cursor', 'chats'), + }, + { + provider: 'codex', + rootPath: path.join(os.homedir(), '.codex', 'sessions'), + }, + { + provider: 'gemini', + rootPath: path.join(os.homedir(), '.gemini', 'sessions'), + }, + { + provider: 'gemini', + rootPath: path.join(os.homedir(), '.gemini', 'tmp'), + }, +]; + +const WATCHER_IGNORED_PATTERNS = [ + '**/node_modules/**', + '**/.git/**', + '**/dist/**', + '**/build/**', + '**/*.tmp', + '**/*.swp', + '**/.DS_Store', +]; + +const PROJECTS_UPDATE_DEBOUNCE_MS = 500; +const PROJECTS_UPDATE_MAX_WAIT_MS = 2_000; + +const watchers: FSWatcher[] = []; + +type PendingWatcherUpdate = { + providers: Set<LLMProvider>; + changeTypes: Set<WatcherEventType>; + updatedSessionIds: Set<string>; +}; + +let pendingWatcherUpdate: PendingWatcherUpdate | null = null; +let pendingWatcherUpdateStartedAt: number | null = null; +let
pendingWatcherFlushTimer: ReturnType<typeof setTimeout> | null = null; +let watcherRefreshInFlight = false; +let watcherRescheduleAfterRefresh = false; + +/** + * Filters watcher events to provider-specific session artifact file types. + */ +function isWatcherTargetFile(provider: LLMProvider, filePath: string): boolean { + if (provider === 'gemini') { + return filePath.endsWith('.json') || filePath.endsWith('.jsonl'); + } + + return filePath.endsWith('.jsonl'); +} + +function clearPendingWatcherFlushTimer(): void { + if (pendingWatcherFlushTimer) { + clearTimeout(pendingWatcherFlushTimer); + pendingWatcherFlushTimer = null; + } +} + +function schedulePendingWatcherFlush(): void { + if (!pendingWatcherUpdate) { + return; + } + + const now = Date.now(); + if (pendingWatcherUpdateStartedAt === null) { + pendingWatcherUpdateStartedAt = now; + } + + const elapsed = now - pendingWatcherUpdateStartedAt; + const remainingMaxWait = Math.max(0, PROJECTS_UPDATE_MAX_WAIT_MS - elapsed); + const delay = Math.min(PROJECTS_UPDATE_DEBOUNCE_MS, remainingMaxWait); + + clearPendingWatcherFlushTimer(); + pendingWatcherFlushTimer = setTimeout(() => { + void flushPendingWatcherUpdate(); + }, delay); +} + +function queuePendingWatcherUpdate( + eventType: WatcherEventType, + provider: LLMProvider, + updatedSessionId: string | null +): void { + if (!pendingWatcherUpdate) { + pendingWatcherUpdate = { + providers: new Set(), + changeTypes: new Set(), + updatedSessionIds: new Set(), + }; + } + + pendingWatcherUpdate.providers.add(provider); + pendingWatcherUpdate.changeTypes.add(eventType); + if (updatedSessionId) { + pendingWatcherUpdate.updatedSessionIds.add(updatedSessionId); + } + + schedulePendingWatcherFlush(); +} + +async function flushPendingWatcherUpdate(): Promise<void> { + clearPendingWatcherFlushTimer(); + + if (!pendingWatcherUpdate) { + return; + } + + if (watcherRefreshInFlight) { + watcherRescheduleAfterRefresh = true; + return; + } + + const queuedUpdate = pendingWatcherUpdate; + pendingWatcherUpdate = null; + pendingWatcherUpdateStartedAt = null; + watcherRefreshInFlight = true; + + try { + const updatedProjects = await getProjectsWithSessions({ skipSynchronization: true }); + const changeTypes = Array.from(queuedUpdate.changeTypes); + const watchProviders = Array.from(queuedUpdate.providers); + const updatedSessionIds = Array.from(queuedUpdate.updatedSessionIds); + + // Backward-compatible fields stay populated with the first queued values. + const updateMessage = JSON.stringify({ + type: 'projects_updated', + projects: updatedProjects, + timestamp: new Date().toISOString(), + changeType: changeTypes[0] ?? 'change', + updatedSessionId: updatedSessionIds[0] ?? undefined, + watchProvider: watchProviders[0] ?? undefined, + changeTypes, + updatedSessionIds, + watchProviders, + batched: true, + }); + + connectedClients.forEach(client => { + if (client.readyState === WS_OPEN_STATE) { + client.send(updateMessage); + } + }); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + console.error('Session watcher refresh failed while broadcasting projects_updated', { error: message }); + } finally { + watcherRefreshInFlight = false; + + if (pendingWatcherUpdate || watcherRescheduleAfterRefresh) { + watcherRescheduleAfterRefresh = false; + schedulePendingWatcherFlush(); + } + } +} + +/** + * Handles file watcher updates and triggers provider file-level synchronization.
+ */ +async function onUpdate( + eventType: WatcherEventType, + filePath: string, + provider: LLMProvider +): Promise<void> { + if (!isWatcherTargetFile(provider, filePath)) { + return; + } + + try { + const result = await sessionSynchronizerService.synchronizeProviderFile(provider, filePath); + if (!result.indexed) { + return; + } + + console.log(`Session synchronization triggered by ${eventType} event for provider "${provider}"`, { + filePath, + sessionId: result.sessionId, + }); + queuePendingWatcherUpdate(eventType, provider, result.sessionId); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + console.error(`Session watcher sync failed for provider "${provider}"`, { + eventType, + filePath, + error: message, + }); + } +} + +/** + * Starts provider filesystem watchers and performs initial DB synchronization. + */ +export async function initializeSessionsWatcher(): Promise<void> { + console.log('Setting up session watchers'); + + const initialSync = await sessionSynchronizerService.synchronizeSessions(); + console.log('Initial session synchronization complete', { + processedByProvider: initialSync.processedByProvider, + failures: initialSync.failures, + }); + + for (const { provider, rootPath } of PROVIDER_WATCH_PATHS) { + try { + await fsPromises.mkdir(rootPath, { recursive: true }); + + const watcher = chokidar.watch(rootPath, { + ignored: WATCHER_IGNORED_PATTERNS, + persistent: true, + ignoreInitial: true, + followSymlinks: false, + depth: 6, + usePolling: true, + interval: 6_000, + binaryInterval: 6_000, + }); + + watcher + .on('add', (filePath: string) => { + void onUpdate('add', filePath, provider); + }) + .on('change', (filePath: string) => { + void onUpdate('change', filePath, provider); + }) + .on('error', (error: unknown) => { + const message = error instanceof Error ? error.message : String(error); + console.error(`Session watcher error for provider "${provider}"`, { error: message }); + }); + + watchers.push(watcher); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + console.error(`Failed to initialize session watcher for provider "${provider}"`, { + rootPath, + error: message, + }); + } + } +} + +/** + * Stops all active provider session watchers. + */ +export async function closeSessionsWatcher(): Promise<void> { + clearPendingWatcherFlushTimer(); + + await Promise.all( + watchers.map(async (watcher) => { + try { + await watcher.close(); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + console.error('Failed to close session watcher', { error: message }); + } + }) + ); + watchers.length = 0; + pendingWatcherUpdate = null; + pendingWatcherUpdateStartedAt = null; + watcherRefreshInFlight = false; + watcherRescheduleAfterRefresh = false; +} diff --git a/server/modules/providers/services/sessions.service.ts b/server/modules/providers/services/sessions.service.ts index adff6e8f..32572e95 100644 --- a/server/modules/providers/services/sessions.service.ts +++ b/server/modules/providers/services/sessions.service.ts @@ -1,3 +1,6 @@ +import fsp from 'node:fs/promises'; + +import { sessionsDb } from '@/modules/database/index.js'; import { providerRegistry } from '@/modules/providers/provider.registry.js'; import type { FetchHistoryOptions, @@ -5,6 +8,23 @@ import type { LLMProvider, NormalizedMessage, } from '@/shared/types.js'; +import { AppError } from '@/shared/utils.js'; + +/** + * Removes one file if it exists.
+ */ +async function removeFileIfExists(filePath: string): Promise<boolean> { + try { + await fsp.unlink(filePath); + return true; + } catch (error) { + const code = (error as NodeJS.ErrnoException).code; + if (code === 'ENOENT') { + return false; + } + throw error; + } +} /** * Application service for provider-backed session message operations. @@ -33,13 +53,78 @@ export const sessionsService = { }, /** - * Fetches normalized persisted session history for one provider/session pair. + * Fetches persisted history by session id. + * + * Provider and provider-specific lookup hints are resolved from the indexed + * session metadata in the database. */ fetchHistory( - providerName: string, sessionId: string, - options?: FetchHistoryOptions, + options: Pick<FetchHistoryOptions, 'limit' | 'offset'> = {}, ): Promise<NormalizedMessage[]> { - return providerRegistry.resolveProvider(providerName).sessions.fetchHistory(sessionId, options); + const session = sessionsDb.getSessionById(sessionId); + if (!session) { + throw new AppError(`Session "${sessionId}" was not found.`, { + code: 'SESSION_NOT_FOUND', + statusCode: 404, + }); + } + + const provider = session.provider as LLMProvider; + return providerRegistry.resolveProvider(provider).sessions.fetchHistory(sessionId, { + limit: options.limit ?? null, + offset: options.offset ?? 0, + projectPath: session.project_path ?? '', + }); + }, + + /** + * Deletes one persisted session row by id. + * + * When `deletedFromDisk` is true and a session `jsonl_path` exists, the path + * is deleted from disk before the DB row is removed. + */ + async deleteSessionById( + sessionId: string, + deletedFromDisk = false, + ): Promise<{ sessionId: string; deletedFromDisk: boolean }> { + const session = sessionsDb.getSessionById(sessionId); + if (!session) { + throw new AppError(`Session "${sessionId}" was not found.`, { + code: 'SESSION_NOT_FOUND', + statusCode: 404, + }); + } + + let removedFromDisk = false; + if (deletedFromDisk && session.jsonl_path) { + removedFromDisk = await removeFileIfExists(session.jsonl_path); + } + + const deleted = sessionsDb.deleteSessionById(sessionId); + if (!deleted) { + throw new AppError(`Session "${sessionId}" was not found.`, { + code: 'SESSION_NOT_FOUND', + statusCode: 404, + }); + } + + return { sessionId, deletedFromDisk: removedFromDisk }; + }, + + /** + * Renames one session by id without requiring the caller to pass provider.
+ */ + renameSessionById(sessionId: string, summary: string): { sessionId: string; summary: string } { + const session = sessionsDb.getSessionById(sessionId); + if (!session) { + throw new AppError(`Session "${sessionId}" was not found.`, { + code: 'SESSION_NOT_FOUND', + statusCode: 404, + }); + } + + sessionsDb.updateSessionCustomName(sessionId, summary); + return { sessionId, summary }; }, }; diff --git a/server/modules/providers/shared/base/abstract.provider.ts b/server/modules/providers/shared/base/abstract.provider.ts index 4a591baf..c674364d 100644 --- a/server/modules/providers/shared/base/abstract.provider.ts +++ b/server/modules/providers/shared/base/abstract.provider.ts @@ -1,4 +1,10 @@ -import type { IProvider, IProviderAuth, IProviderMcp, IProviderSessions } from '@/shared/interfaces.js'; +import type { + IProvider, + IProviderAuth, + IProviderMcp, + IProviderSessionSynchronizer, + IProviderSessions, +} from '@/shared/interfaces.js'; import type { LLMProvider } from '@/shared/types.js'; /** @@ -13,6 +19,7 @@ export abstract class AbstractProvider implements IProvider { abstract readonly mcp: IProviderMcp; abstract readonly auth: IProviderAuth; abstract readonly sessions: IProviderSessions; + abstract readonly sessionSynchronizer: IProviderSessionSynchronizer; protected constructor(id: LLMProvider) { this.id = id; diff --git a/server/modules/websocket/README.md b/server/modules/websocket/README.md new file mode 100644 index 00000000..12db6349 --- /dev/null +++ b/server/modules/websocket/README.md @@ -0,0 +1,267 @@ +# WebSocket Module + +This module owns the server-side WebSocket gateway used by: + +1. Chat streaming (`/ws`) +2. Interactive terminal sessions (`/shell`) +3. Plugin WebSocket passthrough (`/plugin-ws/:pluginName`) + +It is intentionally structured as **small services** plus a **barrel export** in `index.ts`. + +## Public API + +`server/modules/websocket/index.ts` exports: + +1. `createWebSocketServer(server, dependencies)` +Creates and wires the shared `ws` server. +2. `connectedClients` and `WS_OPEN_STATE` +Shared chat client registry and open-state constant used by other modules. + +## Why Dependency Injection Is Used + +The module receives runtime-specific functions from `server/index.js` instead of importing legacy runtime files directly. + +Benefits: + +1. Keeps module boundaries clean (`server/modules/*` architecture rule). +2. Makes each service easier to test in isolation. +3. Keeps WebSocket transport concerns separate from provider runtime concerns. 
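+A minimal sketch of the testing benefit (the fake token logic and stand-in
+upgrade object below are illustrative, not a real test from this repo):
+
+```ts
+import { verifyWebSocketClient } from './services/websocket-auth.service.js';
+
+// Fake dependencies: no provider runtime and no database needed.
+const fakeDeps = {
+  isPlatform: false,
+  authenticateWebSocket: (token: string | null) =>
+    token === 'good-token' ? { id: 1, username: 'dev' } : null,
+};
+
+// Shape-compatible stand-in for the ws upgrade info object.
+const fakeInfo = {
+  req: { url: '/ws?token=good-token', headers: {} },
+} as unknown as Parameters<typeof verifyWebSocketClient>[0];
+
+console.log(verifyWebSocketClient(fakeInfo, fakeDeps)); // logs true
+```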
+ +## Service Map + +| File | Responsibility | +|---|---| +| `services/websocket-server.service.ts` | Creates `WebSocketServer`, binds `verifyClient`, routes connection by pathname | +| `services/websocket-auth.service.ts` | Authenticates upgrade requests and attaches `request.user` | +| `services/chat-websocket.service.ts` | Handles `/ws` chat protocol and provider command/session control messages | +| `services/shell-websocket.service.ts` | Handles `/shell` PTY lifecycle, reconnect buffering, auth URL detection | +| `services/plugin-websocket-proxy.service.ts` | Bridges client socket to plugin socket | +| `services/websocket-writer.service.ts` | Adapts raw WebSocket to writer interface (`send`, `setSessionId`, `getSessionId`) | +| `services/websocket-state.service.ts` | Holds shared chat client set and open-state constant | + +## High-Level Architecture + +```mermaid +flowchart LR + A[HTTP Server] --> B[createWebSocketServer] + B --> C[verifyWebSocketClient] + B --> D{Pathname} + D -->|/ws| E[handleChatConnection] + D -->|/shell| F[handleShellConnection] + D -->|/plugin-ws/:name| G[handlePluginWsProxy] + D -->|other| H["close()"] + + E --> I[connectedClients Set] + E --> J[WebSocketWriter] + F --> K[ptySessionsMap] + G --> L[Upstream Plugin ws://127.0.0.1:port/ws] + + I --> M[projects.service broadcastProgress] + I --> N[sessions-watcher.service projects_updated] +``` + +## Connection Handshake + Routing + +```mermaid +sequenceDiagram + participant Client + participant WSS as WebSocketServer + participant Auth as verifyWebSocketClient + participant Router as connection router + participant Chat as /ws handler + participant Shell as /shell handler + participant Proxy as /plugin-ws handler + + Client->>WSS: Upgrade Request + WSS->>Auth: verifyClient(info) + alt Platform mode + Auth->>Auth: authenticateWebSocket(null) + Auth->>Auth: attach request.user + else OSS mode + Auth->>Auth: read token from ?token or Authorization + Auth->>Auth: authenticateWebSocket(token) + Auth->>Auth: attach request.user + end + + alt Auth failed + Auth-->>WSS: false (reject handshake) + else Auth ok + Auth-->>WSS: true + WSS->>Router: on("connection", ws, request) + alt pathname == /ws + Router->>Chat: handleChatConnection(ws, request, deps.chat) + else pathname == /shell + Router->>Shell: handleShellConnection(ws, deps.shell) + else pathname startsWith /plugin-ws/ + Router->>Proxy: handlePluginWsProxy(ws, pathname, getPluginPort) + else unknown + Router->>Router: ws.close() + end + end +``` + +## `/ws` Chat Flow + +When a chat socket connects: + +1. Add socket to `connectedClients`. +2. Build `WebSocketWriter` (captures `userId` from authenticated request). +3. Parse each incoming message with `parseIncomingJsonObject`. +4. Dispatch by `data.type`. +5. On close, remove socket from `connectedClients`.
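+A minimal client sketch of this flow (the port, token, and session id are
+placeholders; the message shapes follow the dispatch table below):
+
+```ts
+import WebSocket from 'ws';
+
+const ws = new WebSocket('ws://localhost:3001/ws?token=YOUR_JWT');
+
+ws.on('open', () => {
+  // Ask whether a provider session is still processing.
+  ws.send(JSON.stringify({
+    type: 'check-session-status',
+    provider: 'claude',
+    sessionId: 'SOME_SESSION_ID',
+  }));
+});
+
+ws.on('message', (raw) => {
+  const msg = JSON.parse(raw.toString());
+  if (msg.type === 'session-status') {
+    console.log('processing:', msg.isProcessing);
+    ws.close();
+  }
+});
+```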
+ +### Chat Message Dispatch + +```mermaid +flowchart TD + A[Incoming WS message] --> B[parseIncomingJsonObject] + B -->|invalid| C["send {type:error}"] + B -->|ok| D{data.type} + + D -->|claude-command| E[queryClaudeSDK] + D -->|cursor-command| F[spawnCursor] + D -->|codex-command| G[queryCodex] + D -->|gemini-command| H[spawnGemini] + D -->|cursor-resume| I[spawnCursor resume] + D -->|abort-session| J[abort by provider] + D -->|claude-permission-response| K[resolveToolApproval] + D -->|cursor-abort| L[abortCursorSession] + D -->|check-session-status| M[is*SessionActive + optional reconnectSessionWriter] + D -->|get-pending-permissions| N[getPendingApprovalsForSession] + D -->|get-active-sessions| O[getActive*Sessions] +``` + +### Chat Notes + +1. `abort-session` returns a normalized `complete` message with `aborted: true`. +2. `check-session-status` returns `{ type: "session-status", isProcessing }`. +3. Claude status checks can reconnect the output stream to the new socket via `reconnectSessionWriter`. + +## `/shell` Terminal Flow + +The shell handler manages persistent PTY sessions keyed by: + +`<projectPath>_<sessionId>[_cmd_<commandHash>]` + +This enables reconnect behavior and isolates command-specific plain-shell sessions. + +### Shell Lifecycle + +```mermaid +stateDiagram-v2 + [*] --> WaitingInit + WaitingInit --> ValidateInit: message.type == init + ValidateInit --> ReconnectExisting: session key exists and not login reset + ValidateInit --> SpawnNewPTY: valid path + valid sessionId + ValidateInit --> EmitError: invalid payload/path/sessionId + + ReconnectExisting --> Running: attach ws, replay buffer + SpawnNewPTY --> Running: pty.spawn + wire onData/onExit + + Running --> Running: input -> pty.write + Running --> Running: resize -> pty.resize + Running --> Running: onData -> buffer + output + auth_url detection + Running --> Exited: onExit + Running --> Detached: ws close + + Detached --> Running: reconnect before timeout + Detached --> Killed: timeout reached -> pty.kill + Exited --> [*] + Killed --> [*] + EmitError --> WaitingInit +``` + +### Shell Behaviors in Detail + +1. `init`: +Reads `projectPath`, `sessionId`, `provider`, `hasSession`, `initialCommand`, `isPlainShell`. +2. Login reset: +For login-like commands, the existing keyed PTY session is killed and recreated. +3. Validation: +Path must exist and be a directory; `sessionId` must match the safe pattern. +4. Command build: +Provider-specific command construction with resume semantics. +5. PTY output buffering: +Stores up to 5000 chunks for replay on reconnect. +6. URL detection: +Strips ANSI, accumulates a text buffer, extracts URLs, emits `auth_url` once per normalized URL, supports `autoOpen`. +7. Close behavior: +Socket disconnect does not instantly kill the PTY; the session is kept alive and terminated on timeout.
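+A sketch of the `/shell` handshake from a client (values are placeholders;
+the fields map to the behaviors listed above):
+
+```ts
+import WebSocket from 'ws';
+
+const shell = new WebSocket('ws://localhost:3001/shell?token=YOUR_JWT');
+
+shell.on('open', () => {
+  shell.send(JSON.stringify({
+    type: 'init',
+    projectPath: '/home/me/my-project', // must exist and be a directory
+    sessionId: 'abc-123',               // must match the safe id pattern
+    provider: 'claude',
+    hasSession: true,                   // resume semantics (see item 4)
+    cols: 120,
+    rows: 40,
+  }));
+});
+
+shell.on('message', (raw) => {
+  const msg = JSON.parse(raw.toString());
+  if (msg.type === 'output') process.stdout.write(msg.data);
+  if (msg.type === 'auth_url') console.log('auth url:', msg.url);
+});
+```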
+ +## `/plugin-ws/:pluginName` Proxy Flow + +```mermaid +sequenceDiagram + participant Client + participant Proxy as handlePluginWsProxy + participant PM as getPluginPort + participant Upstream as Plugin WS + + Client->>Proxy: Connect /plugin-ws/:name + Proxy->>Proxy: Validate pluginName regex + alt Invalid name + Proxy-->>Client: close(4400, "Invalid plugin name") + else Valid + Proxy->>PM: getPluginPort(name) + alt Plugin not running + Proxy-->>Client: close(4404, "Plugin not running") + else Port found + Proxy->>Upstream: new WebSocket(ws://127.0.0.1:port/ws) + Client-->>Upstream: relay messages bidirectionally + Upstream-->>Client: relay messages bidirectionally + Upstream-->>Client: close propagation + Client-->>Upstream: close propagation + Upstream-->>Client: close(4502, "Upstream error") on upstream error + end + end +``` + +## Shared Client Registry and Broadcasts + +Only chat sockets (`/ws`) are tracked in `connectedClients`. + +That shared set is consumed by: + +1. `modules/projects/services/projects-with-sessions-fetch.service.ts` +Broadcasts `loading_progress` while project snapshots are being built. +2. `modules/providers/services/sessions-watcher.service.ts` +Broadcasts `projects_updated` when provider session artifacts change. + +This design centralizes cross-module realtime fanout without requiring route-local references to WebSocket internals. + +## Writer Adapter (`WebSocketWriter`) + +`WebSocketWriter` normalizes chat transport behavior to match existing writer-style interfaces used elsewhere. + +Methods: + +1. `send(data)` +JSON-serializes and sends only if socket is open. +2. `setSessionId(sessionId)` / `getSessionId()` +Supports provider session bookkeeping and resume flows. +3. `updateWebSocket(newRawWs)` +Allows active session stream redirection on reconnect. + +## Error Handling and Close Codes + +Current explicit close codes in this module: + +1. `4400`: Invalid plugin name +2. `4404`: Plugin not running +3. `4502`: Upstream plugin WebSocket error + +Other errors: + +1. Chat handler catches and emits `{ type: "error", error }`. +2. Shell handler catches and writes terminal-visible error output. +3. Unknown websocket paths are closed immediately. + +## Extending This Module + +To add a new websocket route: + +1. Add a new handler service under `services/`. +2. Extend `WebSocketServerDependencies` in `websocket-server.service.ts` if needed. +3. Add a new pathname branch in the router. +4. Wire dependency injection from `server/index.js`. +5. Keep `index.ts` as barrel-only export surface. 
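+For example, a hypothetical `/health` route following steps 1-3 (not part of
+this PR) would look roughly like:
+
+```ts
+// services/health-websocket.service.ts
+import type { WebSocket } from 'ws';
+
+export function handleHealthConnection(ws: WebSocket): void {
+  ws.send(JSON.stringify({ type: 'health', status: 'ok' }));
+  ws.close();
+}
+```
+
+with a matching branch in the connection router:
+
+```ts
+if (pathname === '/health') {
+  handleHealthConnection(ws);
+  return;
+}
+```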
diff --git a/server/modules/websocket/index.ts b/server/modules/websocket/index.ts new file mode 100644 index 00000000..da65ee82 --- /dev/null +++ b/server/modules/websocket/index.ts @@ -0,0 +1,2 @@ +export { WS_OPEN_STATE, connectedClients } from './services/websocket-state.service.js'; +export { createWebSocketServer } from './services/websocket-server.service.js'; diff --git a/server/modules/websocket/services/chat-websocket.service.ts b/server/modules/websocket/services/chat-websocket.service.ts new file mode 100644 index 00000000..95fabe55 --- /dev/null +++ b/server/modules/websocket/services/chat-websocket.service.ts @@ -0,0 +1,271 @@ +import type { WebSocket } from 'ws'; + +import { connectedClients } from '@/modules/websocket/services/websocket-state.service.js'; +import { WebSocketWriter } from '@/modules/websocket/services/websocket-writer.service.js'; +import type { + AnyRecord, + AuthenticatedWebSocketRequest, + LLMProvider, +} from '@/shared/types.js'; +import { createNormalizedMessage, parseIncomingJsonObject } from '@/shared/utils.js'; + +type ChatIncomingMessage = AnyRecord & { + type?: string; + command?: string; + options?: AnyRecord; + provider?: string; + sessionId?: string; + requestId?: string; + allow?: unknown; + updatedInput?: unknown; + message?: unknown; + rememberEntry?: unknown; +}; + +const DEFAULT_PROVIDER: LLMProvider = 'claude'; + +type ChatWebSocketDependencies = { + queryClaudeSDK: (command: string, options: unknown, writer: WebSocketWriter) => Promise<void>; + spawnCursor: (command: string, options: unknown, writer: WebSocketWriter) => Promise<void>; + queryCodex: (command: string, options: unknown, writer: WebSocketWriter) => Promise<void>; + spawnGemini: (command: string, options: unknown, writer: WebSocketWriter) => Promise<void>; + abortClaudeSDKSession: (sessionId: string) => Promise<boolean>; + abortCursorSession: (sessionId: string) => boolean; + abortCodexSession: (sessionId: string) => boolean; + abortGeminiSession: (sessionId: string) => boolean; + resolveToolApproval: ( + requestId: string, + payload: { + allow: boolean; + updatedInput?: unknown; + message?: string; + rememberEntry?: unknown; + } + ) => void; + isClaudeSDKSessionActive: (sessionId: string) => boolean; + isCursorSessionActive: (sessionId: string) => boolean; + isCodexSessionActive: (sessionId: string) => boolean; + isGeminiSessionActive: (sessionId: string) => boolean; + reconnectSessionWriter: (sessionId: string, ws: WebSocket) => boolean; + getPendingApprovalsForSession: (sessionId: string) => unknown[]; + getActiveClaudeSDKSessions: () => unknown; + getActiveCursorSessions: () => unknown; + getActiveCodexSessions: () => unknown; + getActiveGeminiSessions: () => unknown; +}; + +/** + * Normalizes potentially invalid provider names coming from websocket payloads. + */ +function readProvider(value: unknown): LLMProvider { + if (value === 'claude' || value === 'cursor' || value === 'codex' || value === 'gemini') { + return value; + } + + return DEFAULT_PROVIDER; +} + +/** + * Extracts the authenticated request user id in the formats currently produced + * by platform and OSS auth code paths.
+ */ +function readRequestUserId( + request: AuthenticatedWebSocketRequest | undefined +): string | number | null { + const user = request?.user; + if (!user) { + return null; + } + + if (typeof user.id === 'string' || typeof user.id === 'number') { + return user.id; + } + + if (typeof user.userId === 'string' || typeof user.userId === 'number') { + return user.userId; + } + + return null; +} + +/** + * Handles authenticated chat websocket messages used by the main chat panel. + */ +export function handleChatConnection( + ws: WebSocket, + request: AuthenticatedWebSocketRequest, + dependencies: ChatWebSocketDependencies +): void { + console.log('[INFO] Chat WebSocket connected'); + connectedClients.add(ws); + + const writer = new WebSocketWriter(ws, readRequestUserId(request)); + + ws.on('message', async (rawMessage) => { + try { + const parsed = parseIncomingJsonObject(rawMessage); + if (!parsed) { + throw new Error('Invalid websocket payload'); + } + + const data = parsed as ChatIncomingMessage; + const messageType = data.type; + if (!messageType) { + throw new Error('Message type is required'); + } + + if (messageType === 'claude-command') { + await dependencies.queryClaudeSDK(data.command ?? '', data.options, writer); + return; + } + + if (messageType === 'cursor-command') { + await dependencies.spawnCursor(data.command ?? '', data.options, writer); + return; + } + + if (messageType === 'codex-command') { + await dependencies.queryCodex(data.command ?? '', data.options, writer); + return; + } + + if (messageType === 'gemini-command') { + await dependencies.spawnGemini(data.command ?? '', data.options, writer); + return; + } + + if (messageType === 'cursor-resume') { + await dependencies.spawnCursor( + '', + { + sessionId: data.sessionId, + resume: true, + cwd: data.options?.cwd, + }, + writer + ); + return; + } + + if (messageType === 'abort-session') { + const provider = readProvider(data.provider); + const sessionId = typeof data.sessionId === 'string' ? data.sessionId : ''; + let success = false; + + if (provider === 'cursor') { + success = dependencies.abortCursorSession(sessionId); + } else if (provider === 'codex') { + success = dependencies.abortCodexSession(sessionId); + } else if (provider === 'gemini') { + success = dependencies.abortGeminiSession(sessionId); + } else { + success = await dependencies.abortClaudeSDKSession(sessionId); + } + + writer.send( + createNormalizedMessage({ + kind: 'complete', + exitCode: success ? 0 : 1, + aborted: true, + success, + sessionId, + provider, + }) + ); + return; + } + + if (messageType === 'claude-permission-response') { + if (typeof data.requestId === 'string' && data.requestId.length > 0) { + dependencies.resolveToolApproval(data.requestId, { + allow: Boolean(data.allow), + updatedInput: data.updatedInput, + message: typeof data.message === 'string' ? data.message : undefined, + rememberEntry: data.rememberEntry, + }); + } + return; + } + + if (messageType === 'cursor-abort') { + const sessionId = typeof data.sessionId === 'string' ? data.sessionId : ''; + const success = dependencies.abortCursorSession(sessionId); + writer.send( + createNormalizedMessage({ + kind: 'complete', + exitCode: success ? 0 : 1, + aborted: true, + success, + sessionId, + provider: 'cursor', + }) + ); + return; + } + + if (messageType === 'check-session-status') { + const provider = readProvider(data.provider); + const sessionId = typeof data.sessionId === 'string' ? 
data.sessionId : ''; + let isActive = false; + + if (provider === 'cursor') { + isActive = dependencies.isCursorSessionActive(sessionId); + } else if (provider === 'codex') { + isActive = dependencies.isCodexSessionActive(sessionId); + } else if (provider === 'gemini') { + isActive = dependencies.isGeminiSessionActive(sessionId); + } else { + isActive = dependencies.isClaudeSDKSessionActive(sessionId); + if (isActive) { + dependencies.reconnectSessionWriter(sessionId, ws); + } + } + + writer.send({ + type: 'session-status', + sessionId, + provider, + isProcessing: isActive, + }); + return; + } + + if (messageType === 'get-pending-permissions') { + const sessionId = typeof data.sessionId === 'string' ? data.sessionId : ''; + if (sessionId && dependencies.isClaudeSDKSessionActive(sessionId)) { + const pending = dependencies.getPendingApprovalsForSession(sessionId); + writer.send({ + type: 'pending-permissions-response', + sessionId, + data: pending, + }); + } + return; + } + + if (messageType === 'get-active-sessions') { + writer.send({ + type: 'active-sessions', + sessions: { + claude: dependencies.getActiveClaudeSDKSessions(), + cursor: dependencies.getActiveCursorSessions(), + codex: dependencies.getActiveCodexSessions(), + gemini: dependencies.getActiveGeminiSessions(), + }, + }); + } + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + console.error('[ERROR] Chat WebSocket error:', message); + writer.send({ + type: 'error', + error: message, + }); + } + }); + + ws.on('close', () => { + console.log('[INFO] Chat client disconnected'); + connectedClients.delete(ws); + }); +} diff --git a/server/modules/websocket/services/plugin-websocket-proxy.service.ts b/server/modules/websocket/services/plugin-websocket-proxy.service.ts new file mode 100644 index 00000000..491fd540 --- /dev/null +++ b/server/modules/websocket/services/plugin-websocket-proxy.service.ts @@ -0,0 +1,65 @@ +import { WebSocket } from 'ws'; + +/** + * Proxies an authenticated client websocket to a plugin websocket endpoint. 
+ */ +export function handlePluginWsProxy( + clientWs: WebSocket, + pathname: string, + getPluginPort: (pluginName: string) => number | null +): void { + const pluginName = pathname.replace('/plugin-ws/', ''); + if (!pluginName || /[^a-zA-Z0-9_-]/.test(pluginName)) { + clientWs.close(4400, 'Invalid plugin name'); + return; + } + + const port = getPluginPort(pluginName); + if (!port) { + clientWs.close(4404, 'Plugin not running'); + return; + } + + const upstream = new WebSocket(`ws://127.0.0.1:${port}/ws`); + + upstream.on('open', () => { + console.log(`[Plugins] WS proxy connected to "${pluginName}" on port ${port}`); + }); + + upstream.on('message', (data) => { + if (clientWs.readyState === WebSocket.OPEN) { + clientWs.send(data); + } + }); + + clientWs.on('message', (data) => { + if (upstream.readyState === WebSocket.OPEN) { + upstream.send(data); + } + }); + + upstream.on('close', () => { + if (clientWs.readyState === WebSocket.OPEN) { + clientWs.close(); + } + }); + + clientWs.on('close', () => { + if (upstream.readyState === WebSocket.OPEN) { + upstream.close(); + } + }); + + upstream.on('error', (error) => { + console.error(`[Plugins] WS proxy error for "${pluginName}":`, error.message); + if (clientWs.readyState === WebSocket.OPEN) { + clientWs.close(4502, 'Upstream error'); + } + }); + + clientWs.on('error', () => { + if (upstream.readyState === WebSocket.OPEN) { + upstream.close(); + } + }); +} diff --git a/server/modules/websocket/services/shell-websocket.service.ts b/server/modules/websocket/services/shell-websocket.service.ts new file mode 100644 index 00000000..9bf7046b --- /dev/null +++ b/server/modules/websocket/services/shell-websocket.service.ts @@ -0,0 +1,453 @@ +import fs from 'node:fs'; +import os from 'node:os'; +import path from 'node:path'; + +import pty, { type IPty } from 'node-pty'; +import { WebSocket, type RawData } from 'ws'; + +import { parseIncomingJsonObject } from '@/shared/utils.js'; + +type ShellIncomingMessage = { + type?: string; + data?: string; + cols?: number; + rows?: number; + projectPath?: string; + sessionId?: string; + hasSession?: boolean; + provider?: string; + initialCommand?: string; + isPlainShell?: boolean; +}; + +type PtySessionEntry = { + pty: IPty; + ws: WebSocket | null; + buffer: string[]; + timeoutId: NodeJS.Timeout | null; + projectPath: string; + sessionId: string | null; +}; + +const ptySessionsMap = new Map(); +const PTY_SESSION_TIMEOUT = 30 * 60 * 1000; +const SHELL_URL_PARSE_BUFFER_LIMIT = 32768; + +type ShellWebSocketDependencies = { + getSessionById: (sessionId: string) => { cliSessionId?: string } | null | undefined; + stripAnsiSequences: (content: string) => string; + normalizeDetectedUrl: (url: string) => string | null; + extractUrlsFromText: (content: string) => string[]; + shouldAutoOpenUrlFromOutput: (content: string) => boolean; +}; + +/** + * Reads a string field from untyped payloads and falls back when absent. + */ +function readString(value: unknown, fallback = ''): string { + return typeof value === 'string' ? value : fallback; +} + +/** + * Reads a boolean field from untyped payloads and falls back when absent. + */ +function readBoolean(value: unknown, fallback = false): boolean { + return typeof value === 'boolean' ? value : fallback; +} + +/** + * Reads a finite number field from untyped payloads and falls back when absent. + */ +function readNumber(value: unknown, fallback: number): number { + return typeof value === 'number' && Number.isFinite(value) ? 
value : fallback; +} + +/** + * Parses incoming websocket shell messages and keeps processing safe when + * malformed payloads are received. + */ +function parseShellMessage(rawMessage: RawData): ShellIncomingMessage | null { + const payload = parseIncomingJsonObject(rawMessage); + if (!payload) { + return null; + } + + return payload as ShellIncomingMessage; +} + +/** + * Resolves provider command line for plain shell and agent-backed shell modes. + */ +function buildShellCommand( + message: ShellIncomingMessage, + dependencies: ShellWebSocketDependencies +): string { + const hasSession = readBoolean(message.hasSession); + const sessionId = readString(message.sessionId); + const initialCommand = readString(message.initialCommand); + const provider = readString(message.provider, 'claude'); + const safeSessionIdPattern = /^[a-zA-Z0-9_.\-:]+$/; + const isPlainShell = + readBoolean(message.isPlainShell) || + (!!initialCommand && !hasSession) || + provider === 'plain-shell'; + + if (isPlainShell) { + return initialCommand; + } + + if (provider === 'cursor') { + if (hasSession && sessionId) { + return `cursor-agent --resume="${sessionId}"`; + } + return 'cursor-agent'; + } + + if (provider === 'codex') { + if (hasSession && sessionId) { + if (os.platform() === 'win32') { + return `codex resume "${sessionId}"; if ($LASTEXITCODE -ne 0) { codex }`; + } + return `codex resume "${sessionId}" || codex`; + } + return 'codex'; + } + + if (provider === 'gemini') { + const command = initialCommand || 'gemini'; + let resumeId = sessionId; + if (hasSession && sessionId) { + try { + const existingSession = dependencies.getSessionById(sessionId); + if (existingSession && existingSession.cliSessionId) { + resumeId = existingSession.cliSessionId; + if (!safeSessionIdPattern.test(resumeId)) { + resumeId = ''; + } + } + } catch (error) { + console.error('Failed to get Gemini CLI session ID:', error); + } + } + + if (hasSession && resumeId) { + return `${command} --resume "${resumeId}"`; + } + return command; + } + + const command = initialCommand || 'claude'; + if (hasSession && sessionId) { + if (os.platform() === 'win32') { + return `claude --resume "${sessionId}"; if ($LASTEXITCODE -ne 0) { claude }`; + } + return `claude --resume "${sessionId}" || claude`; + } + return command; +} + +/** + * Handles websocket connections used by the standalone shell terminal UI. 
+ */ +export function handleShellConnection( + ws: WebSocket, + dependencies: ShellWebSocketDependencies +): void { + console.log('[INFO] Shell websocket connected'); + + let shellProcess: IPty | null = null; + let ptySessionKey: string | null = null; + let urlDetectionBuffer = ''; + const announcedAuthUrls = new Set(); + + ws.on('message', async (rawMessage) => { + try { + const data = parseShellMessage(rawMessage); + if (!data?.type) { + throw new Error('Invalid websocket payload'); + } + + if (data.type === 'init') { + const projectPath = readString(data.projectPath, process.cwd()); + const sessionId = readString(data.sessionId) || null; + const hasSession = readBoolean(data.hasSession); + const provider = readString(data.provider, 'claude'); + const initialCommand = readString(data.initialCommand); + const isPlainShell = + readBoolean(data.isPlainShell) || + (!!initialCommand && !hasSession) || + provider === 'plain-shell'; + + urlDetectionBuffer = ''; + announcedAuthUrls.clear(); + + const isLoginCommand = + !!initialCommand && + (initialCommand.includes('setup-token') || + initialCommand.includes('cursor-agent login') || + initialCommand.includes('auth login')); + + const commandSuffix = + isPlainShell && initialCommand + ? `_cmd_${Buffer.from(initialCommand).toString('base64').slice(0, 16)}` + : ''; + ptySessionKey = `${projectPath}_${sessionId ?? 'default'}${commandSuffix}`; + + if (isLoginCommand) { + const oldSession = ptySessionsMap.get(ptySessionKey); + if (oldSession) { + if (oldSession.timeoutId) { + clearTimeout(oldSession.timeoutId); + } + oldSession.pty.kill(); + ptySessionsMap.delete(ptySessionKey); + } + } + + const existingSession = isLoginCommand ? null : ptySessionsMap.get(ptySessionKey); + if (existingSession) { + shellProcess = existingSession.pty; + if (existingSession.timeoutId) { + clearTimeout(existingSession.timeoutId); + } + + ws.send( + JSON.stringify({ + type: 'output', + data: '\x1b[36m[Reconnected to existing session]\x1b[0m\r\n', + }) + ); + + if (existingSession.buffer.length > 0) { + existingSession.buffer.forEach((bufferedData) => { + ws.send( + JSON.stringify({ + type: 'output', + data: bufferedData, + }) + ); + }); + } + + existingSession.ws = ws; + return; + } + + const resolvedProjectPath = path.resolve(projectPath); + try { + const stats = fs.statSync(resolvedProjectPath); + if (!stats.isDirectory()) { + throw new Error('Not a directory'); + } + } catch { + ws.send(JSON.stringify({ type: 'error', message: 'Invalid project path' })); + return; + } + + const safeSessionIdPattern = /^[a-zA-Z0-9_.\-:]+$/; + if (sessionId && !safeSessionIdPattern.test(sessionId)) { + ws.send(JSON.stringify({ type: 'error', message: 'Invalid session ID' })); + return; + } + + const shellCommand = buildShellCommand(data, dependencies); + const shell = os.platform() === 'win32' ? 'powershell.exe' : 'bash'; + const shellArgs = + os.platform() === 'win32' ? 
['-Command', shellCommand] : ['-c', shellCommand]; + const termCols = readNumber(data.cols, 80); + const termRows = readNumber(data.rows, 24); + + shellProcess = pty.spawn(shell, shellArgs, { + name: 'xterm-256color', + cols: termCols, + rows: termRows, + cwd: resolvedProjectPath, + env: { + ...process.env, + TERM: 'xterm-256color', + COLORTERM: 'truecolor', + FORCE_COLOR: '3', + }, + }); + + ptySessionsMap.set(ptySessionKey, { + pty: shellProcess, + ws, + buffer: [], + timeoutId: null, + projectPath, + sessionId, + }); + + shellProcess.onData((chunk) => { + if (!ptySessionKey) { + return; + } + + const session = ptySessionsMap.get(ptySessionKey); + if (!session) { + return; + } + + if (session.buffer.length < 5000) { + session.buffer.push(chunk); + } else { + session.buffer.shift(); + session.buffer.push(chunk); + } + + if (session.ws && session.ws.readyState === WebSocket.OPEN) { + let outputData = chunk; + const cleanChunk = dependencies.stripAnsiSequences(chunk); + urlDetectionBuffer = `${urlDetectionBuffer}${cleanChunk}`.slice(-SHELL_URL_PARSE_BUFFER_LIMIT); + + outputData = outputData.replace( + /OPEN_URL:\s*(https?:\/\/[^\s\x1b\x07]+)/g, + '[INFO] Opening in browser: $1' + ); + + const emitAuthUrl = (detectedUrl: string, autoOpen = false) => { + const normalizedUrl = dependencies.normalizeDetectedUrl(detectedUrl); + if (!normalizedUrl) { + return; + } + + const isNewUrl = !announcedAuthUrls.has(normalizedUrl); + if (isNewUrl) { + announcedAuthUrls.add(normalizedUrl); + session.ws?.send( + JSON.stringify({ + type: 'auth_url', + url: normalizedUrl, + autoOpen, + }) + ); + } + }; + + const normalizedDetectedUrls = dependencies.extractUrlsFromText(urlDetectionBuffer) + .map((url) => dependencies.normalizeDetectedUrl(url)) + .filter((url): url is string => Boolean(url)); + + const dedupedDetectedUrls = Array.from(new Set(normalizedDetectedUrls)).filter( + (url, _, urls) => + !urls.some((otherUrl) => otherUrl !== url && otherUrl.startsWith(url)) + ); + + dedupedDetectedUrls.forEach((url) => emitAuthUrl(url, false)); + + if ( + dependencies.shouldAutoOpenUrlFromOutput(cleanChunk) && + dedupedDetectedUrls.length > 0 + ) { + const bestUrl = dedupedDetectedUrls.reduce((longest, current) => + current.length > longest.length ? current : longest + ); + emitAuthUrl(bestUrl, true); + } + + session.ws.send( + JSON.stringify({ + type: 'output', + data: outputData, + }) + ); + } + }); + + shellProcess.onExit((exitCode) => { + if (!ptySessionKey) { + return; + } + + const session = ptySessionsMap.get(ptySessionKey); + if (session && session.ws && session.ws.readyState === WebSocket.OPEN) { + session.ws.send( + JSON.stringify({ + type: 'output', + data: `\r\n\x1b[33mProcess exited with code ${exitCode.exitCode}${ + exitCode.signal != null ? ` (${exitCode.signal})` : '' + }\x1b[0m\r\n`, + }) + ); + } + + if (session?.timeoutId) { + clearTimeout(session.timeoutId); + } + + ptySessionsMap.delete(ptySessionKey); + shellProcess = null; + }); + + let welcomeMsg = `\x1b[36mStarting terminal in: ${projectPath}\x1b[0m\r\n`; + if (!isPlainShell) { + const providerName = + provider === 'cursor' + ? 'Cursor' + : provider === 'codex' + ? 'Codex' + : provider === 'gemini' + ? 'Gemini' + : 'Claude'; + welcomeMsg = hasSession + ? 
`\x1b[36mResuming ${providerName} session ${sessionId} in: ${projectPath}\x1b[0m\r\n` + : `\x1b[36mStarting new ${providerName} session in: ${projectPath}\x1b[0m\r\n`; + } + + ws.send( + JSON.stringify({ + type: 'output', + data: welcomeMsg, + }) + ); + return; + } + + if (data.type === 'input') { + if (shellProcess) { + shellProcess.write(readString(data.data)); + } + return; + } + + if (data.type === 'resize') { + if (shellProcess) { + shellProcess.resize(readNumber(data.cols, 80), readNumber(data.rows, 24)); + } + } + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + console.error('[ERROR] Shell WebSocket error:', message); + if (ws.readyState === WebSocket.OPEN) { + ws.send( + JSON.stringify({ + type: 'output', + data: `\r\n\x1b[31mError: ${message}\x1b[0m\r\n`, + }) + ); + } + } + }); + + ws.on('close', () => { + if (!ptySessionKey) { + return; + } + + const session = ptySessionsMap.get(ptySessionKey); + if (!session) { + return; + } + + session.ws = null; + session.timeoutId = setTimeout(() => { + session.pty.kill(); + ptySessionsMap.delete(ptySessionKey as string); + }, PTY_SESSION_TIMEOUT); + }); + + ws.on('error', (error) => { + console.error('[ERROR] Shell WebSocket error:', error); + }); +} diff --git a/server/modules/websocket/services/websocket-auth.service.ts b/server/modules/websocket/services/websocket-auth.service.ts new file mode 100644 index 00000000..bd689d5d --- /dev/null +++ b/server/modules/websocket/services/websocket-auth.service.ts @@ -0,0 +1,54 @@ +import type { VerifyClientCallbackSync } from 'ws'; + +import type { AuthenticatedWebSocketRequest } from '@/shared/types.js'; + +type WebSocketAuthDependencies = { + isPlatform: boolean; + authenticateWebSocket: (token: string | null) => { + id?: string | number; + userId?: string | number; + username?: string; + [key: string]: unknown; + } | null; +}; + +/** + * Authenticates websocket upgrade requests before the `connection` handler runs. + */ +export function verifyWebSocketClient( + info: Parameters<NonNullable<VerifyClientCallbackSync>>[0], + dependencies: WebSocketAuthDependencies +): boolean { + const request = info.req as AuthenticatedWebSocketRequest; + console.log('WebSocket connection attempt to:', request.url); + + // Platform mode: use the first DB user and skip token checks. + if (dependencies.isPlatform) { + const user = dependencies.authenticateWebSocket(null); + if (!user) { + console.log('[WARN] Platform mode: No user found in database'); + return false; + } + + request.user = user; + console.log('[OK] Platform mode WebSocket authenticated for user:', user.username); + return true; + } + + // OSS mode: read JWT from query string first, then Authorization header. + const upgradeUrl = new URL(request.url ?? '/', 'http://localhost'); + const token = + upgradeUrl.searchParams.get('token') ?? + request.headers.authorization?.split(' ')[1] ??
+ null; + + const user = dependencies.authenticateWebSocket(token); + if (!user) { + console.log('[WARN] WebSocket authentication failed'); + return false; + } + + request.user = user; + console.log('[OK] WebSocket authenticated for user:', user.username); + return true; +} diff --git a/server/modules/websocket/services/websocket-server.service.ts b/server/modules/websocket/services/websocket-server.service.ts new file mode 100644 index 00000000..7e5c12e4 --- /dev/null +++ b/server/modules/websocket/services/websocket-server.service.ts @@ -0,0 +1,58 @@ +import type { Server as HttpServer } from 'node:http'; + +import { WebSocketServer, type VerifyClientCallbackSync } from 'ws'; + +import { handleChatConnection } from '@/modules/websocket/services/chat-websocket.service.js'; +import { verifyWebSocketClient } from '@/modules/websocket/services/websocket-auth.service.js'; +import { handlePluginWsProxy } from '@/modules/websocket/services/plugin-websocket-proxy.service.js'; +import { handleShellConnection } from '@/modules/websocket/services/shell-websocket.service.js'; +import type { AuthenticatedWebSocketRequest } from '@/shared/types.js'; + +type WebSocketServerDependencies = { + verifyClient: Parameters<typeof verifyWebSocketClient>[1]; + chat: Parameters<typeof handleChatConnection>[2]; + shell: Parameters<typeof handleShellConnection>[1]; + getPluginPort: Parameters<typeof handlePluginWsProxy>[2]; +}; + +/** + * Creates and wires the server-wide websocket gateway used for chat, shell, and + * plugin proxy routes. + */ +export function createWebSocketServer( + server: HttpServer, + dependencies: WebSocketServerDependencies +): WebSocketServer { + const wss = new WebSocketServer({ + server, + verifyClient: (( + info: Parameters<NonNullable<VerifyClientCallbackSync>>[0] + ) => verifyWebSocketClient(info, dependencies.verifyClient)), + }); + + wss.on('connection', (ws, request) => { + const incomingRequest = request as AuthenticatedWebSocketRequest; + const url = incomingRequest.url ?? '/'; + const pathname = new URL(url, 'http://localhost').pathname; + + if (pathname === '/shell') { + handleShellConnection(ws, dependencies.shell); + return; + } + + if (pathname === '/ws') { + handleChatConnection(ws, incomingRequest, dependencies.chat); + return; + } + + if (pathname.startsWith('/plugin-ws/')) { + handlePluginWsProxy(ws, pathname, dependencies.getPluginPort); + return; + } + + console.log('[WARN] Unknown WebSocket path:', pathname); + ws.close(); + }); + + return wss; +} diff --git a/server/modules/websocket/services/websocket-state.service.ts b/server/modules/websocket/services/websocket-state.service.ts new file mode 100644 index 00000000..3cffce24 --- /dev/null +++ b/server/modules/websocket/services/websocket-state.service.ts @@ -0,0 +1,16 @@ +import type { RealtimeClientConnection } from '@/shared/types.js'; + +/** + * Numeric readyState for an open WebSocket connection. + * + * We keep this in module state so services that broadcast updates do not need + * to import `ws` directly just to compare open/closed state. + */ +export const WS_OPEN_STATE = 1; + +/** + * Shared registry of active chat WebSocket connections. + * + * Project/session services publish realtime updates by iterating this set.
+ */ +export const connectedClients = new Set<RealtimeClientConnection>(); diff --git a/server/modules/websocket/services/websocket-writer.service.ts b/server/modules/websocket/services/websocket-writer.service.ts new file mode 100644 index 00000000..af307ad6 --- /dev/null +++ b/server/modules/websocket/services/websocket-writer.service.ts @@ -0,0 +1,38 @@ +import { WS_OPEN_STATE } from '@/modules/websocket/services/websocket-state.service.js'; +import type { RealtimeClientConnection } from '@/shared/types.js'; + +/** + * Thin transport adapter that gives WebSocket connections the same interface as + * SSE writers used by API routes (`send`, `setSessionId`, `getSessionId`). + */ +export class WebSocketWriter { + ws: RealtimeClientConnection; + sessionId: string | null; + userId: string | number | null; + isWebSocketWriter: boolean; + + constructor(ws: RealtimeClientConnection, userId: string | number | null = null) { + this.ws = ws; + this.sessionId = null; + this.userId = userId; + this.isWebSocketWriter = true; + } + + send(data: unknown): void { + if (this.ws.readyState === WS_OPEN_STATE) { + this.ws.send(JSON.stringify(data)); + } + } + + updateWebSocket(newRawWs: RealtimeClientConnection): void { + this.ws = newRawWs; + } + + setSessionId(sessionId: string): void { + this.sessionId = sessionId; + } + + getSessionId(): string | null { + return this.sessionId; + } +} diff --git a/server/projects.js b/server/projects.js deleted file mode 100755 index 67184af0..00000000 --- a/server/projects.js +++ /dev/null @@ -1,2555 +0,0 @@ -/** - * PROJECT DISCOVERY AND MANAGEMENT SYSTEM - * ======================================== - * - * This module manages project discovery for both Claude CLI and Cursor CLI sessions. - * - * ## Architecture Overview - * - * 1. **Claude Projects** (stored in ~/.claude/projects/) - * - Each project is a directory named with the project path encoded (/ replaced with -) - * - Contains .jsonl files with conversation history including 'cwd' field - * - Project metadata stored in ~/.claude/project-config.json - * - * 2. **Cursor Projects** (stored in ~/.cursor/chats/) - * - Each project directory is named with MD5 hash of the absolute project path - * - Example: /Users/john/myproject -> MD5 -> a1b2c3d4e5f6... - * - Contains session directories with SQLite databases (store.db) - * - Project path is NOT stored in the database - only in the MD5 hash - * - * ## Project Discovery Strategy - * - * 1. **Claude Projects Discovery**: - * - Scan ~/.claude/projects/ directory for Claude project folders - * - Extract actual project path from .jsonl files (cwd field) - * - Fall back to decoded directory name if no sessions exist - * - * 2. **Cursor Sessions Discovery**: - * - For each KNOWN project (from Claude or manually added) - * - Compute MD5 hash of the project's absolute path - * - Check if ~/.cursor/chats/{md5_hash}/ directory exists - * - Read session metadata from SQLite store.db files - * - * 3. **Manual Project Addition**: - * - Users can manually add project paths via UI - * - Stored in ~/.claude/project-config.json with 'manuallyAdded' flag - * - Allows discovering Cursor sessions for projects without Claude sessions - * - * ## Critical Limitations - * - * - **CANNOT discover Cursor-only projects**: From a quick check, there was no mention of - * the cwd of each project. if someone has the time, you can try to reverse engineer it.
- * - * - **Project relocation breaks history**: If a project directory is moved or renamed, - * the MD5 hash changes, making old Cursor sessions inaccessible unless the old - * path is known and manually added. - * - * ## Error Handling - * - * - Missing ~/.claude directory is handled gracefully with automatic creation - * - ENOENT errors are caught and handled without crashing - * - Empty arrays returned when no projects/sessions exist - * - * ## Caching Strategy - * - * - Project directory extraction is cached to minimize file I/O - * - Cache is cleared when project configuration changes - * - Session data is fetched on-demand, not cached - */ - -import { promises as fs } from 'fs'; -import fsSync from 'fs'; -import path from 'path'; -import readline from 'readline'; -import crypto from 'crypto'; -import Database from 'better-sqlite3'; -import os from 'os'; -import sessionManager from './sessionManager.js'; -import { applyCustomSessionNames } from './database/db.js'; - -// Import TaskMaster detection functions -async function detectTaskMasterFolder(projectPath) { - try { - const taskMasterPath = path.join(projectPath, '.taskmaster'); - - // Check if .taskmaster directory exists - try { - const stats = await fs.stat(taskMasterPath); - if (!stats.isDirectory()) { - return { - hasTaskmaster: false, - reason: '.taskmaster exists but is not a directory' - }; - } - } catch (error) { - if (error.code === 'ENOENT') { - return { - hasTaskmaster: false, - reason: '.taskmaster directory not found' - }; - } - throw error; - } - - // Check for key TaskMaster files - const keyFiles = [ - 'tasks/tasks.json', - 'config.json' - ]; - - const fileStatus = {}; - let hasEssentialFiles = true; - - for (const file of keyFiles) { - const filePath = path.join(taskMasterPath, file); - try { - await fs.access(filePath); - fileStatus[file] = true; - } catch (error) { - fileStatus[file] = false; - if (file === 'tasks/tasks.json') { - hasEssentialFiles = false; - } - } - } - - // Parse tasks.json if it exists for metadata - let taskMetadata = null; - if (fileStatus['tasks/tasks.json']) { - try { - const tasksPath = path.join(taskMasterPath, 'tasks/tasks.json'); - const tasksContent = await fs.readFile(tasksPath, 'utf8'); - const tasksData = JSON.parse(tasksContent); - - // Handle both tagged and legacy formats - let tasks = []; - if (tasksData.tasks) { - // Legacy format - tasks = tasksData.tasks; - } else { - // Tagged format - get tasks from all tags - Object.values(tasksData).forEach(tagData => { - if (tagData.tasks) { - tasks = tasks.concat(tagData.tasks); - } - }); - } - - // Calculate task statistics - const stats = tasks.reduce((acc, task) => { - acc.total++; - acc[task.status] = (acc[task.status] || 0) + 1; - - // Count subtasks - if (task.subtasks) { - task.subtasks.forEach(subtask => { - acc.subtotalTasks++; - acc.subtasks = acc.subtasks || {}; - acc.subtasks[subtask.status] = (acc.subtasks[subtask.status] || 0) + 1; - }); - } - - return acc; - }, { - total: 0, - subtotalTasks: 0, - pending: 0, - 'in-progress': 0, - done: 0, - review: 0, - deferred: 0, - cancelled: 0, - subtasks: {} - }); - - taskMetadata = { - taskCount: stats.total, - subtaskCount: stats.subtotalTasks, - completed: stats.done || 0, - pending: stats.pending || 0, - inProgress: stats['in-progress'] || 0, - review: stats.review || 0, - completionPercentage: stats.total > 0 ? 
Math.round((stats.done / stats.total) * 100) : 0, - lastModified: (await fs.stat(tasksPath)).mtime.toISOString() - }; - } catch (parseError) { - console.warn('Failed to parse tasks.json:', parseError.message); - taskMetadata = { error: 'Failed to parse tasks.json' }; - } - } - - return { - hasTaskmaster: true, - hasEssentialFiles, - files: fileStatus, - metadata: taskMetadata, - path: taskMasterPath - }; - - } catch (error) { - console.error('Error detecting TaskMaster folder:', error); - return { - hasTaskmaster: false, - reason: `Error checking directory: ${error.message}` - }; - } -} - -// Cache for extracted project directories -const projectDirectoryCache = new Map(); - -// Clear cache when needed (called when project files change) -function clearProjectDirectoryCache() { - projectDirectoryCache.clear(); -} - -// Load project configuration file -async function loadProjectConfig() { - const configPath = path.join(os.homedir(), '.claude', 'project-config.json'); - try { - const configData = await fs.readFile(configPath, 'utf8'); - return JSON.parse(configData); - } catch (error) { - // Return empty config if file doesn't exist - return {}; - } -} - -// Save project configuration file -async function saveProjectConfig(config) { - const claudeDir = path.join(os.homedir(), '.claude'); - const configPath = path.join(claudeDir, 'project-config.json'); - - // Ensure the .claude directory exists - try { - await fs.mkdir(claudeDir, { recursive: true }); - } catch (error) { - if (error.code !== 'EEXIST') { - throw error; - } - } - - await fs.writeFile(configPath, JSON.stringify(config, null, 2), 'utf8'); -} - -// Generate better display name from path -async function generateDisplayName(projectName, actualProjectDir = null) { - // Use actual project directory if provided, otherwise decode from project name - let projectPath = actualProjectDir || projectName.replace(/-/g, '/'); - - // Try to read package.json from the project path - try { - const packageJsonPath = path.join(projectPath, 'package.json'); - const packageData = await fs.readFile(packageJsonPath, 'utf8'); - const packageJson = JSON.parse(packageData); - - // Return the name from package.json if it exists - if (packageJson.name) { - return packageJson.name; - } - } catch (error) { - // Fall back to path-based naming if package.json doesn't exist or can't be read - } - - // If it starts with /, it's an absolute path - if (projectPath.startsWith('/')) { - const parts = projectPath.split('/').filter(Boolean); - // Return only the last folder name - return parts[parts.length - 1] || projectPath; - } - - return projectPath; -} - -// Extract the actual project directory from JSONL sessions (with caching) -async function extractProjectDirectory(projectName) { - // Check cache first - if (projectDirectoryCache.has(projectName)) { - return projectDirectoryCache.get(projectName); - } - - // Check project config for originalPath (manually added projects via UI or platform) - // This handles projects with dashes in their directory names correctly - const config = await loadProjectConfig(); - if (config[projectName]?.originalPath) { - const originalPath = config[projectName].originalPath; - projectDirectoryCache.set(projectName, originalPath); - return originalPath; - } - - const projectDir = path.join(os.homedir(), '.claude', 'projects', projectName); - const cwdCounts = new Map(); - let latestTimestamp = 0; - let latestCwd = null; - let extractedPath; - - try { - // Check if the project directory exists - await fs.access(projectDir); - - const 
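`extractProjectDirectory` consults `originalPath` before decoding the directory name because the dash encoding is lossy. A quick illustration (paths hypothetical):

// Decoding by replacing every '-' with '/' cannot distinguish path separators
// from dashes that were part of a real directory name:
const decoded = '-Users-jane-my-app'.replace(/-/g, '/'); // '/Users/jane/my/app'
// If the project actually lived at '/Users/jane/my-app', this decode is wrong,
// which is why config[projectName].originalPath takes precedence when present.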
files = await fs.readdir(projectDir); - const jsonlFiles = files.filter(file => file.endsWith('.jsonl')); - - if (jsonlFiles.length === 0) { - // Fall back to decoded project name if no sessions - extractedPath = projectName.replace(/-/g, '/'); - } else { - // Process all JSONL files to collect cwd values - for (const file of jsonlFiles) { - const jsonlFile = path.join(projectDir, file); - const fileStream = fsSync.createReadStream(jsonlFile); - const rl = readline.createInterface({ - input: fileStream, - crlfDelay: Infinity - }); - - for await (const line of rl) { - if (line.trim()) { - try { - const entry = JSON.parse(line); - - if (entry.cwd) { - // Count occurrences of each cwd - cwdCounts.set(entry.cwd, (cwdCounts.get(entry.cwd) || 0) + 1); - - // Track the most recent cwd - const timestamp = new Date(entry.timestamp || 0).getTime(); - if (timestamp > latestTimestamp) { - latestTimestamp = timestamp; - latestCwd = entry.cwd; - } - } - } catch (parseError) { - // Skip malformed lines - } - } - } - } - - // Determine the best cwd to use - if (cwdCounts.size === 0) { - // No cwd found, fall back to decoded project name - extractedPath = projectName.replace(/-/g, '/'); - } else if (cwdCounts.size === 1) { - // Only one cwd, use it - extractedPath = Array.from(cwdCounts.keys())[0]; - } else { - // Multiple cwd values - prefer the most recent one if it has reasonable usage - const mostRecentCount = cwdCounts.get(latestCwd) || 0; - const maxCount = Math.max(...cwdCounts.values()); - - // Use most recent if it has at least 25% of the max count - if (mostRecentCount >= maxCount * 0.25) { - extractedPath = latestCwd; - } else { - // Otherwise use the most frequently used cwd - for (const [cwd, count] of cwdCounts.entries()) { - if (count === maxCount) { - extractedPath = cwd; - break; - } - } - } - - // Fallback (shouldn't reach here) - if (!extractedPath) { - extractedPath = latestCwd || projectName.replace(/-/g, '/'); - } - } - } - - // Cache the result - projectDirectoryCache.set(projectName, extractedPath); - - return extractedPath; - - } catch (error) { - // If the directory doesn't exist, just use the decoded project name - if (error.code === 'ENOENT') { - extractedPath = projectName.replace(/-/g, '/'); - } else { - console.error(`Error extracting project directory for ${projectName}:`, error); - // Fall back to decoded project name for other errors - extractedPath = projectName.replace(/-/g, '/'); - } - - // Cache the fallback result too - projectDirectoryCache.set(projectName, extractedPath); - - return extractedPath; - } -} - -async function getProjects(progressCallback = null) { - const claudeDir = path.join(os.homedir(), '.claude', 'projects'); - const config = await loadProjectConfig(); - const projects = []; - const existingProjects = new Set(); - const codexSessionsIndexRef = { sessionsByProject: null }; - let totalProjects = 0; - let processedProjects = 0; - let directories = []; - - try { - // Check if the .claude/projects directory exists - await fs.access(claudeDir); - - // First, get existing Claude projects from the file system - const entries = await fs.readdir(claudeDir, { withFileTypes: true }); - directories = entries.filter(e => e.isDirectory()); - - // Build set of existing project names for later - directories.forEach(e => existingProjects.add(e.name)); - - // Count manual projects not already in directories - const manualProjectsCount = Object.entries(config) - .filter(([name, cfg]) => cfg.manuallyAdded && !existingProjects.has(name)) - .length; - - totalProjects = 
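The cwd selection above is a recency-weighted vote: the most recent cwd wins if it accounts for at least 25% of the most frequent cwd's count, otherwise frequency wins. Isolated as a sketch:

// Sketch of the voting rule used above, over a Map of cwd -> occurrence count.
function pickProjectCwd(cwdCounts, latestCwd) {
  const maxCount = Math.max(...cwdCounts.values());
  if ((cwdCounts.get(latestCwd) || 0) >= maxCount * 0.25) return latestCwd;
  for (const [cwd, count] of cwdCounts.entries()) {
    if (count === maxCount) return cwd; // most frequent wins otherwise
  }
}

// pickProjectCwd(new Map([['/a', 8], ['/b', 2]]), '/b') // '/b' (2 >= 8 * 0.25)
// pickProjectCwd(new Map([['/a', 8], ['/b', 1]]), '/b') // '/a' (1 <  8 * 0.25)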
directories.length + manualProjectsCount; - - for (const entry of directories) { - processedProjects++; - - // Emit progress - if (progressCallback) { - progressCallback({ - phase: 'loading', - current: processedProjects, - total: totalProjects, - currentProject: entry.name - }); - } - - // Extract actual project directory from JSONL sessions - const actualProjectDir = await extractProjectDirectory(entry.name); - - // Get display name from config or generate one - const customName = config[entry.name]?.displayName; - const autoDisplayName = await generateDisplayName(entry.name, actualProjectDir); - const fullPath = actualProjectDir; - - const project = { - name: entry.name, - path: actualProjectDir, - displayName: customName || autoDisplayName, - fullPath: fullPath, - isCustomName: !!customName, - sessions: [], - geminiSessions: [], - sessionMeta: { - hasMore: false, - total: 0 - } - }; - - // Try to get sessions for this project (just first 5 for performance) - try { - const sessionResult = await getSessions(entry.name, 5, 0); - project.sessions = sessionResult.sessions || []; - project.sessionMeta = { - hasMore: sessionResult.hasMore, - total: sessionResult.total - }; - } catch (e) { - console.warn(`Could not load sessions for project ${entry.name}:`, e.message); - project.sessionMeta = { - hasMore: false, - total: 0 - }; - } - applyCustomSessionNames(project.sessions, 'claude'); - - // Also fetch Cursor sessions for this project - try { - project.cursorSessions = await getCursorSessions(actualProjectDir); - } catch (e) { - console.warn(`Could not load Cursor sessions for project ${entry.name}:`, e.message); - project.cursorSessions = []; - } - applyCustomSessionNames(project.cursorSessions, 'cursor'); - - // Also fetch Codex sessions for this project - try { - project.codexSessions = await getCodexSessions(actualProjectDir, { - indexRef: codexSessionsIndexRef, - }); - } catch (e) { - console.warn(`Could not load Codex sessions for project ${entry.name}:`, e.message); - project.codexSessions = []; - } - applyCustomSessionNames(project.codexSessions, 'codex'); - - // Also fetch Gemini sessions for this project (UI + CLI) - try { - const uiSessions = sessionManager.getProjectSessions(actualProjectDir) || []; - const cliSessions = await getGeminiCliSessions(actualProjectDir); - const uiIds = new Set(uiSessions.map(s => s.id)); - const mergedGemini = [...uiSessions, ...cliSessions.filter(s => !uiIds.has(s.id))]; - project.geminiSessions = mergedGemini; - } catch (e) { - console.warn(`Could not load Gemini sessions for project ${entry.name}:`, e.message); - project.geminiSessions = []; - } - applyCustomSessionNames(project.geminiSessions, 'gemini'); - - // Add TaskMaster detection - try { - const taskMasterResult = await detectTaskMasterFolder(actualProjectDir); - project.taskmaster = { - hasTaskmaster: taskMasterResult.hasTaskmaster, - hasEssentialFiles: taskMasterResult.hasEssentialFiles, - metadata: taskMasterResult.metadata, - status: taskMasterResult.hasTaskmaster && taskMasterResult.hasEssentialFiles ? 
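The `progressCallback` above emits plain objects; a caller might surface them like this (the logging is illustrative only):

// Sketch: consuming the { phase, current, total, currentProject } events
// emitted by getProjects while it scans project directories.
const projects = await getProjects((progress) => {
  if (progress.phase === 'loading') {
    console.log(`[${progress.current}/${progress.total}] ${progress.currentProject}`);
  } else if (progress.phase === 'complete') {
    console.log(`scan complete: ${progress.total} projects`);
  }
});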
'configured' : 'not-configured' - }; - } catch (e) { - console.warn(`Could not detect TaskMaster for project ${entry.name}:`, e.message); - project.taskmaster = { - hasTaskmaster: false, - hasEssentialFiles: false, - metadata: null, - status: 'error' - }; - } - - projects.push(project); - } - } catch (error) { - // If the directory doesn't exist (ENOENT), that's okay - just continue with empty projects - if (error.code !== 'ENOENT') { - console.error('Error reading projects directory:', error); - } - // Calculate total for manual projects only (no directories exist) - totalProjects = Object.entries(config) - .filter(([name, cfg]) => cfg.manuallyAdded) - .length; - } - - // Add manually configured projects that don't exist as folders yet - for (const [projectName, projectConfig] of Object.entries(config)) { - if (!existingProjects.has(projectName) && projectConfig.manuallyAdded) { - processedProjects++; - - // Emit progress for manual projects - if (progressCallback) { - progressCallback({ - phase: 'loading', - current: processedProjects, - total: totalProjects, - currentProject: projectName - }); - } - - // Use the original path if available, otherwise extract from potential sessions - let actualProjectDir = projectConfig.originalPath; - - if (!actualProjectDir) { - try { - actualProjectDir = await extractProjectDirectory(projectName); - } catch (error) { - // Fall back to decoded project name - actualProjectDir = projectName.replace(/-/g, '/'); - } - } - - const project = { - name: projectName, - path: actualProjectDir, - displayName: projectConfig.displayName || await generateDisplayName(projectName, actualProjectDir), - fullPath: actualProjectDir, - isCustomName: !!projectConfig.displayName, - isManuallyAdded: true, - sessions: [], - geminiSessions: [], - sessionMeta: { - hasMore: false, - total: 0 - }, - cursorSessions: [], - codexSessions: [] - }; - - // Try to fetch Cursor sessions for manual projects too - try { - project.cursorSessions = await getCursorSessions(actualProjectDir); - } catch (e) { - console.warn(`Could not load Cursor sessions for manual project ${projectName}:`, e.message); - } - applyCustomSessionNames(project.cursorSessions, 'cursor'); - - // Try to fetch Codex sessions for manual projects too - try { - project.codexSessions = await getCodexSessions(actualProjectDir, { - indexRef: codexSessionsIndexRef, - }); - } catch (e) { - console.warn(`Could not load Codex sessions for manual project ${projectName}:`, e.message); - } - applyCustomSessionNames(project.codexSessions, 'codex'); - - // Try to fetch Gemini sessions for manual projects too (UI + CLI) - try { - const uiSessions = sessionManager.getProjectSessions(actualProjectDir) || []; - const cliSessions = await getGeminiCliSessions(actualProjectDir); - const uiIds = new Set(uiSessions.map(s => s.id)); - project.geminiSessions = [...uiSessions, ...cliSessions.filter(s => !uiIds.has(s.id))]; - } catch (e) { - console.warn(`Could not load Gemini sessions for manual project ${projectName}:`, e.message); - } - applyCustomSessionNames(project.geminiSessions, 'gemini'); - - // Add TaskMaster detection for manual projects - try { - const taskMasterResult = await detectTaskMasterFolder(actualProjectDir); - - // Determine TaskMaster status - let taskMasterStatus = 'not-configured'; - if (taskMasterResult.hasTaskmaster && taskMasterResult.hasEssentialFiles) { - taskMasterStatus = 'taskmaster-only'; // We don't check MCP for manual projects in bulk - } - - project.taskmaster = { - status: taskMasterStatus, - hasTaskmaster: 
taskMasterResult.hasTaskmaster, - hasEssentialFiles: taskMasterResult.hasEssentialFiles, - metadata: taskMasterResult.metadata - }; - } catch (error) { - console.warn(`TaskMaster detection failed for manual project ${projectName}:`, error.message); - project.taskmaster = { - status: 'error', - hasTaskmaster: false, - hasEssentialFiles: false, - error: error.message - }; - } - - projects.push(project); - } - } - - // Emit completion after all projects (including manual) are processed - if (progressCallback) { - progressCallback({ - phase: 'complete', - current: totalProjects, - total: totalProjects - }); - } - - return projects; -} - -async function getSessions(projectName, limit = 5, offset = 0) { - const projectDir = path.join(os.homedir(), '.claude', 'projects', projectName); - - try { - const files = await fs.readdir(projectDir); - // agent-*.jsonl files contain session start data at this point. This needs to be revisited - // periodically to make sure only accurate data is there and no new functionality is added there - const jsonlFiles = files.filter(file => file.endsWith('.jsonl') && !file.startsWith('agent-')); - - if (jsonlFiles.length === 0) { - return { sessions: [], hasMore: false, total: 0 }; - } - - // Sort files by modification time (newest first) - const filesWithStats = await Promise.all( - jsonlFiles.map(async (file) => { - const filePath = path.join(projectDir, file); - const stats = await fs.stat(filePath); - return { file, mtime: stats.mtime }; - }) - ); - filesWithStats.sort((a, b) => b.mtime - a.mtime); - - const allSessions = new Map(); - const allEntries = []; - const uuidToSessionMap = new Map(); - - // Collect all sessions and entries from all files - for (const { file } of filesWithStats) { - const jsonlFile = path.join(projectDir, file); - const result = await parseJsonlSessions(jsonlFile); - - result.sessions.forEach(session => { - if (!allSessions.has(session.id)) { - allSessions.set(session.id, session); - } - }); - - allEntries.push(...result.entries); - - // Early exit optimization for large projects - if (allSessions.size >= (limit + offset) * 2 && allEntries.length >= Math.min(3, filesWithStats.length)) { - break; - } - } - - // Build UUID-to-session mapping for timeline detection - allEntries.forEach(entry => { - if (entry.uuid && entry.sessionId) { - uuidToSessionMap.set(entry.uuid, entry.sessionId); - } - }); - - // Group sessions by first user message ID - const sessionGroups = new Map(); // firstUserMsgId -> { latestSession, allSessions[] } - const sessionToFirstUserMsgId = new Map(); // sessionId -> firstUserMsgId - - // Find the first user message for each session - allEntries.forEach(entry => { - if (entry.sessionId && entry.type === 'user' && entry.parentUuid === null && entry.uuid) { - // This is a first user message in a session (parentUuid is null) - const firstUserMsgId = entry.uuid; - - if (!sessionToFirstUserMsgId.has(entry.sessionId)) { - sessionToFirstUserMsgId.set(entry.sessionId, firstUserMsgId); - - const session = allSessions.get(entry.sessionId); - if (session) { - if (!sessionGroups.has(firstUserMsgId)) { - sessionGroups.set(firstUserMsgId, { - latestSession: session, - allSessions: [session] - }); - } else { - const group = sessionGroups.get(firstUserMsgId); - group.allSessions.push(session); - - // Update latest session if this one is more recent - if (new Date(session.lastActivity) > new Date(group.latestSession.lastActivity)) { - group.latestSession = session; - } - } - } - } - } - }); - - // Collect all sessions that don't 
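The grouping above keys each session on the uuid of its first user message (`parentUuid === null`), so a resumed conversation collapses to its newest session. The collapse step, as a sketch:

// Sketch: reduce one resume-chain group to its most recent session, carrying
// the isGrouped/groupSize/groupSessions metadata attached above.
function collapseGroup(groupSessions) {
  const latest = groupSessions.reduce((a, b) =>
    new Date(b.lastActivity) > new Date(a.lastActivity) ? b : a);
  if (groupSessions.length === 1) return latest;
  return {
    ...latest,
    isGrouped: true,
    groupSize: groupSessions.length,
    groupSessions: groupSessions.map(s => s.id),
  };
}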
belong to any group (standalone sessions) - const groupedSessionIds = new Set(); - sessionGroups.forEach(group => { - group.allSessions.forEach(session => groupedSessionIds.add(session.id)); - }); - - const standaloneSessionsArray = Array.from(allSessions.values()) - .filter(session => !groupedSessionIds.has(session.id)); - - // Combine grouped sessions (only show latest from each group) + standalone sessions - const latestFromGroups = Array.from(sessionGroups.values()).map(group => { - const session = { ...group.latestSession }; - // Add metadata about grouping - if (group.allSessions.length > 1) { - session.isGrouped = true; - session.groupSize = group.allSessions.length; - session.groupSessions = group.allSessions.map(s => s.id); - } - return session; - }); - const visibleSessions = [...latestFromGroups, ...standaloneSessionsArray] - .filter(session => !session.summary.startsWith('{ "')) - .sort((a, b) => new Date(b.lastActivity) - new Date(a.lastActivity)); - - const total = visibleSessions.length; - const paginatedSessions = visibleSessions.slice(offset, offset + limit); - const hasMore = offset + limit < total; - - return { - sessions: paginatedSessions, - hasMore, - total, - offset, - limit - }; - } catch (error) { - console.error(`Error reading sessions for project ${projectName}:`, error); - return { sessions: [], hasMore: false, total: 0 }; - } -} - -async function parseJsonlSessions(filePath) { - const sessions = new Map(); - const entries = []; - const pendingSummaries = new Map(); // leafUuid -> summary for entries without sessionId - - try { - const fileStream = fsSync.createReadStream(filePath); - const rl = readline.createInterface({ - input: fileStream, - crlfDelay: Infinity - }); - - for await (const line of rl) { - if (line.trim()) { - try { - const entry = JSON.parse(line); - entries.push(entry); - - // Handle summary entries that don't have sessionId yet - if (entry.type === 'summary' && entry.summary && !entry.sessionId && entry.leafUuid) { - pendingSummaries.set(entry.leafUuid, entry.summary); - } - - if (entry.sessionId) { - if (!sessions.has(entry.sessionId)) { - sessions.set(entry.sessionId, { - id: entry.sessionId, - summary: 'New Session', - messageCount: 0, - lastActivity: new Date(), - cwd: entry.cwd || '', - lastUserMessage: null, - lastAssistantMessage: null - }); - } - - const session = sessions.get(entry.sessionId); - - // Apply pending summary if this entry has a parentUuid that matches a pending summary - if (session.summary === 'New Session' && entry.parentUuid && pendingSummaries.has(entry.parentUuid)) { - session.summary = pendingSummaries.get(entry.parentUuid); - } - - // Update summary from summary entries with sessionId - if (entry.type === 'summary' && entry.summary) { - session.summary = entry.summary; - } - - // Track last user and assistant messages (skip system messages) - if (entry.message?.role === 'user' && entry.message?.content) { - const content = entry.message.content; - - // Extract text from array format if needed - let textContent = content; - if (Array.isArray(content) && content.length > 0 && content[0].type === 'text') { - textContent = content[0].text; - } - - const isSystemMessage = typeof textContent === 'string' && ( - textContent.startsWith('') || - textContent.startsWith('') || - textContent.startsWith('') || - textContent.startsWith('') || - textContent.startsWith('') || - textContent.startsWith('Caveat:') || - textContent.startsWith('This session is being continued from a previous') || - textContent.startsWith('Invalid API 
key') || - textContent.includes('{"subtasks":') || // Filter Task Master prompts - textContent.includes('CRITICAL: You MUST respond with ONLY a JSON') || // Filter Task Master system prompts - textContent === 'Warmup' // Explicitly filter out "Warmup" - ); - - if (typeof textContent === 'string' && textContent.length > 0 && !isSystemMessage) { - session.lastUserMessage = textContent; - } - } else if (entry.message?.role === 'assistant' && entry.message?.content) { - // Skip API error messages using the isApiErrorMessage flag - if (entry.isApiErrorMessage === true) { - // Skip this message entirely - } else { - // Track last assistant text message - let assistantText = null; - - if (Array.isArray(entry.message.content)) { - for (const part of entry.message.content) { - if (part.type === 'text' && part.text) { - assistantText = part.text; - } - } - } else if (typeof entry.message.content === 'string') { - assistantText = entry.message.content; - } - - // Additional filter for assistant messages with system content - const isSystemAssistantMessage = typeof assistantText === 'string' && ( - assistantText.startsWith('Invalid API key') || - assistantText.includes('{"subtasks":') || - assistantText.includes('CRITICAL: You MUST respond with ONLY a JSON') - ); - - if (assistantText && !isSystemAssistantMessage) { - session.lastAssistantMessage = assistantText; - } - } - } - - session.messageCount++; - - if (entry.timestamp) { - session.lastActivity = new Date(entry.timestamp); - } - } - } catch (parseError) { - // Skip malformed lines silently - } - } - } - - // After processing all entries, set final summary based on last message if no summary exists - for (const session of sessions.values()) { - if (session.summary === 'New Session') { - // Prefer last user message, fall back to last assistant message - const lastMessage = session.lastUserMessage || session.lastAssistantMessage; - if (lastMessage) { - session.summary = lastMessage.length > 50 ? lastMessage.substring(0, 50) + '...' 
: lastMessage;
-        }
-      }
-    }
-
-    // Filter out sessions that contain JSON responses (Task Master errors)
-    const allSessions = Array.from(sessions.values());
-    const filteredSessions = allSessions.filter(session => !session.summary.startsWith('{ "'));
-
-    return {
-      sessions: filteredSessions,
-      entries: entries
-    };
-
-  } catch (error) {
-    console.error('Error reading JSONL file:', error);
-    return { sessions: [], entries: [] };
-  }
-}
-
-// Parse an agent JSONL file and extract tool uses
-async function parseAgentTools(filePath) {
-  const tools = [];
-
-  try {
-    const fileStream = fsSync.createReadStream(filePath);
-    const rl = readline.createInterface({
-      input: fileStream,
-      crlfDelay: Infinity
-    });
-
-    for await (const line of rl) {
-      if (line.trim()) {
-        try {
-          const entry = JSON.parse(line);
-          // Look for assistant messages with tool_use
-          if (entry.message?.role === 'assistant' && Array.isArray(entry.message?.content)) {
-            for (const part of entry.message.content) {
-              if (part.type === 'tool_use') {
-                tools.push({
-                  toolId: part.id,
-                  toolName: part.name,
-                  toolInput: part.input,
-                  timestamp: entry.timestamp
-                });
-              }
-            }
-          }
-          // Look for tool results
-          if (entry.message?.role === 'user' && Array.isArray(entry.message?.content)) {
-            for (const part of entry.message.content) {
-              if (part.type === 'tool_result') {
-                // Find the matching tool and add result
-                const tool = tools.find(t => t.toolId === part.tool_use_id);
-                if (tool) {
-                  tool.toolResult = {
-                    content: typeof part.content === 'string' ? part.content :
-                      Array.isArray(part.content) ? part.content.map(c => c.text || '').join('\n') :
-                      JSON.stringify(part.content),
-                    isError: Boolean(part.is_error)
-                  };
-                }
-              }
-            }
-          }
-        } catch (parseError) {
-          // Skip malformed lines
-        }
-      }
-    }
-  } catch (error) {
-    console.warn(`Error parsing agent file ${filePath}:`, error.message);
-  }
-
-  return tools;
-}
-
-// Get messages for a specific session with pagination support
-async function getSessionMessages(projectName, sessionId, limit = null, offset = 0) {
-  const projectDir = path.join(os.homedir(), '.claude', 'projects', projectName);
-
-  try {
-    const files = await fs.readdir(projectDir);
-    // agent-*.jsonl files contain subagent tool history - we'll process them separately
-    const jsonlFiles = files.filter(file => file.endsWith('.jsonl') && !file.startsWith('agent-'));
-    const agentFiles = files.filter(file => file.endsWith('.jsonl') && file.startsWith('agent-'));
-
-    if (jsonlFiles.length === 0) {
-      return { messages: [], total: 0, hasMore: false };
-    }
-
-    const messages = [];
-    // Map of agentId -> tools for subagent tool grouping
-    const agentToolsCache = new Map();
-
-    // Process all JSONL files to find messages for this session
-    for (const file of jsonlFiles) {
-      const jsonlFile = path.join(projectDir, file);
-      const fileStream = fsSync.createReadStream(jsonlFile);
-      const rl = readline.createInterface({
-        input: fileStream,
-        crlfDelay: Infinity
-      });
-
-      for await (const line of rl) {
-        if (line.trim()) {
-          try {
-            const entry = JSON.parse(line);
-            if (entry.sessionId === sessionId) {
-              messages.push(entry);
-            }
-          } catch (parseError) {
-            // Silently skip malformed JSONL lines (common with concurrent writes)
-          }
-        }
-      }
-    }
-
-    // Collect agentIds from Task tool results
-    const agentIds = new Set();
-    for (const message of messages) {
-      if
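`parseAgentTools` above pairs each `tool_result` back to the `tool_use` that produced it via `tool_use_id`; reduced to its essentials (a sketch):

// Sketch: attach a tool_result content part to the earlier tool_use it answers.
function attachToolResult(tools, part) {
  const tool = tools.find(t => t.toolId === part.tool_use_id);
  if (!tool) return;
  tool.toolResult = {
    content: typeof part.content === 'string'
      ? part.content
      : JSON.stringify(part.content), // simplified; the code above also joins text arrays
    isError: Boolean(part.is_error),
  };
}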
(message.toolUseResult?.agentId) { - agentIds.add(message.toolUseResult.agentId); - } - } - - // Load agent tools for each agentId found - for (const agentId of agentIds) { - const agentFileName = `agent-${agentId}.jsonl`; - if (agentFiles.includes(agentFileName)) { - const agentFilePath = path.join(projectDir, agentFileName); - const tools = await parseAgentTools(agentFilePath); - agentToolsCache.set(agentId, tools); - } - } - - // Attach agent tools to their parent Task messages - for (const message of messages) { - if (message.toolUseResult?.agentId) { - const agentId = message.toolUseResult.agentId; - const agentTools = agentToolsCache.get(agentId); - if (agentTools && agentTools.length > 0) { - message.subagentTools = agentTools; - } - } - } - // Sort messages by timestamp - const sortedMessages = messages.sort((a, b) => - new Date(a.timestamp || 0) - new Date(b.timestamp || 0) - ); - - const total = sortedMessages.length; - - // If no limit is specified, return all messages (backward compatibility) - if (limit === null) { - return sortedMessages; - } - - // Apply pagination - for recent messages, we need to slice from the end - // offset 0 should give us the most recent messages - const startIndex = Math.max(0, total - offset - limit); - const endIndex = total - offset; - const paginatedMessages = sortedMessages.slice(startIndex, endIndex); - const hasMore = startIndex > 0; - - return { - messages: paginatedMessages, - total, - hasMore, - offset, - limit - }; - } catch (error) { - console.error(`Error reading messages for session ${sessionId}:`, error); - return limit === null ? [] : { messages: [], total: 0, hasMore: false }; - } -} - -// Rename a project's display name -async function renameProject(projectName, newDisplayName) { - const config = await loadProjectConfig(); - - if (!newDisplayName || newDisplayName.trim() === '') { - // Remove custom name if empty, will fall back to auto-generated - if (config[projectName]) { - delete config[projectName].displayName; - } - } else { - // Set custom display name, preserving other properties (manuallyAdded, originalPath) - config[projectName] = { - ...config[projectName], - displayName: newDisplayName.trim() - }; - } - - await saveProjectConfig(config); - return true; -} - -// Delete a session from a project -async function deleteSession(projectName, sessionId) { - const projectDir = path.join(os.homedir(), '.claude', 'projects', projectName); - - try { - const files = await fs.readdir(projectDir); - const jsonlFiles = files.filter(file => file.endsWith('.jsonl')); - - if (jsonlFiles.length === 0) { - throw new Error('No session files found for this project'); - } - - // Check all JSONL files to find which one contains the session - for (const file of jsonlFiles) { - const jsonlFile = path.join(projectDir, file); - const content = await fs.readFile(jsonlFile, 'utf8'); - const lines = content.split('\n').filter(line => line.trim()); - - // Check if this file contains the session - const hasSession = lines.some(line => { - try { - const data = JSON.parse(line); - return data.sessionId === sessionId; - } catch { - return false; - } - }); - - if (hasSession) { - // Filter out all entries for this session - const filteredLines = lines.filter(line => { - try { - const data = JSON.parse(line); - return data.sessionId !== sessionId; - } catch { - return true; // Keep malformed lines - } - }); - - // Write back the filtered content - await fs.writeFile(jsonlFile, filteredLines.join('\n') + (filteredLines.length > 0 ? 
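Pagination in `getSessionMessages` counts from the end of the timeline, so offset 0 returns the newest page. The index math in isolation (a sketch):

// Sketch: take the newest `limit` items, `offset` items back from the end.
function pageFromEnd(sorted, limit, offset) {
  const total = sorted.length;
  const startIndex = Math.max(0, total - offset - limit);
  const endIndex = total - offset;
  return { page: sorted.slice(startIndex, endIndex), hasMore: startIndex > 0 };
}

// pageFromEnd([1, 2, 3, 4, 5], 2, 0) // { page: [4, 5], hasMore: true }
// pageFromEnd([1, 2, 3, 4, 5], 2, 4) // { page: [1],    hasMore: false }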
'\n' : '')); - return true; - } - } - - throw new Error(`Session ${sessionId} not found in any files`); - } catch (error) { - console.error(`Error deleting session ${sessionId} from project ${projectName}:`, error); - throw error; - } -} - -// Check if a project is empty (has no sessions) -async function isProjectEmpty(projectName) { - try { - const sessionsResult = await getSessions(projectName, 1, 0); - return sessionsResult.total === 0; - } catch (error) { - console.error(`Error checking if project ${projectName} is empty:`, error); - return false; - } -} - -// Remove a project from the UI. -// When deleteData=true, also delete session/memory files on disk (destructive). -async function deleteProject(projectName, force = false, deleteData = false) { - const projectDir = path.join(os.homedir(), '.claude', 'projects', projectName); - - try { - const isEmpty = await isProjectEmpty(projectName); - if (!isEmpty && !force) { - throw new Error('Cannot delete project with existing sessions'); - } - - const config = await loadProjectConfig(); - - // Destructive path: delete underlying data when explicitly requested - if (deleteData) { - let projectPath = config[projectName]?.path || config[projectName]?.originalPath; - if (!projectPath) { - projectPath = await extractProjectDirectory(projectName); - } - - // Remove the Claude project directory (session logs, memory, subagent data) - await fs.rm(projectDir, { recursive: true, force: true }); - - // Delete Codex sessions associated with this project - if (projectPath) { - try { - const codexSessions = await getCodexSessions(projectPath, { limit: 0 }); - for (const session of codexSessions) { - try { - await deleteCodexSession(session.id); - } catch (err) { - console.warn(`Failed to delete Codex session ${session.id}:`, err.message); - } - } - } catch (err) { - console.warn('Failed to delete Codex sessions:', err.message); - } - - // Delete Cursor sessions directory if it exists - try { - const hash = crypto.createHash('md5').update(projectPath).digest('hex'); - const cursorProjectDir = path.join(os.homedir(), '.cursor', 'chats', hash); - await fs.rm(cursorProjectDir, { recursive: true, force: true }); - } catch (err) { - // Cursor dir may not exist, ignore - } - } - } - - // Always remove from project config - delete config[projectName]; - await saveProjectConfig(config); - - return true; - } catch (error) { - console.error(`Error removing project ${projectName}:`, error); - throw error; - } -} - -// Add a project manually to the config (without creating folders) -async function addProjectManually(projectPath, displayName = null) { - const absolutePath = path.resolve(projectPath); - - try { - // Check if the path exists - await fs.access(absolutePath); - } catch (error) { - throw new Error(`Path does not exist: ${absolutePath}`); - } - - // Generate project name (encode path for use as directory name) - const projectName = absolutePath.replace(/[\\/:\s~_]/g, '-'); - - // Check if project already exists in config - const config = await loadProjectConfig(); - const projectDir = path.join(os.homedir(), '.claude', 'projects', projectName); - - if (config[projectName]) { - throw new Error(`Project already configured for path: ${absolutePath}`); - } - - // Allow adding projects even if the directory exists - this enables tracking - // existing Claude Code or Cursor projects in the UI - - // Add to config as manually added project - config[projectName] = { - manuallyAdded: true, - originalPath: absolutePath - }; - - if (displayName) { - 
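`addProjectManually` derives the stored project name with a one-way character substitution (see the decode caveat noted earlier):

// Sketch: the encoding used above on a hypothetical absolute path.
// Backslashes, slashes, colons, whitespace, '~' and '_' all become '-'.
const absolutePath = '/Users/jane/my project_v2';
const projectName = absolutePath.replace(/[\\/:\s~_]/g, '-');
// -> '-Users-jane-my-project-v2'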
config[projectName].displayName = displayName; - } - - await saveProjectConfig(config); - - - return { - name: projectName, - path: absolutePath, - fullPath: absolutePath, - displayName: displayName || await generateDisplayName(projectName, absolutePath), - isManuallyAdded: true, - sessions: [], - cursorSessions: [] - }; -} - -// Fetch Cursor sessions for a given project path -async function getCursorSessions(projectPath) { - try { - // Calculate cwdID hash for the project path (Cursor uses MD5 hash) - const cwdId = crypto.createHash('md5').update(projectPath).digest('hex'); - const cursorChatsPath = path.join(os.homedir(), '.cursor', 'chats', cwdId); - - // Check if the directory exists - try { - await fs.access(cursorChatsPath); - } catch (error) { - // No sessions for this project - return []; - } - - // List all session directories - const sessionDirs = await fs.readdir(cursorChatsPath); - const sessions = []; - - for (const sessionId of sessionDirs) { - const sessionPath = path.join(cursorChatsPath, sessionId); - const storeDbPath = path.join(sessionPath, 'store.db'); - - try { - // Check if store.db exists - await fs.access(storeDbPath); - - // Capture store.db mtime as a reliable fallback timestamp - let dbStatMtimeMs = null; - try { - const stat = await fs.stat(storeDbPath); - dbStatMtimeMs = stat.mtimeMs; - } catch (_) { } - - // Open SQLite database - const db = new Database(storeDbPath, { readonly: true, fileMustExist: true }); - - // Get metadata from meta table - const metaRows = db.prepare('SELECT key, value FROM meta').all(); - - // Parse metadata - let metadata = {}; - for (const row of metaRows) { - if (row.value) { - try { - // Try to decode as hex-encoded JSON - const hexMatch = row.value.toString().match(/^[0-9a-fA-F]+$/); - if (hexMatch) { - const jsonStr = Buffer.from(row.value, 'hex').toString('utf8'); - metadata[row.key] = JSON.parse(jsonStr); - } else { - metadata[row.key] = row.value.toString(); - } - } catch (e) { - metadata[row.key] = row.value.toString(); - } - } - } - - // Get message count - const messageCountResult = db.prepare('SELECT COUNT(*) as count FROM blobs').get(); - - db.close(); - - // Extract session info - const sessionName = metadata.title || metadata.sessionTitle || 'Untitled Session'; - - // Determine timestamp - prefer createdAt from metadata, fall back to db file mtime - let createdAt = null; - if (metadata.createdAt) { - createdAt = new Date(metadata.createdAt).toISOString(); - } else if (dbStatMtimeMs) { - createdAt = new Date(dbStatMtimeMs).toISOString(); - } else { - createdAt = new Date().toISOString(); - } - - sessions.push({ - id: sessionId, - name: sessionName, - createdAt: createdAt, - lastActivity: createdAt, // For compatibility with Claude sessions - messageCount: messageCountResult.count || 0, - projectPath: projectPath - }); - - } catch (error) { - console.warn(`Could not read Cursor session ${sessionId}:`, error.message); - } - } - - // Sort sessions by creation time (newest first) - sessions.sort((a, b) => new Date(b.createdAt) - new Date(a.createdAt)); - - // Return only the first 5 sessions for performance - return sessions.slice(0, 5); - - } catch (error) { - console.error('Error fetching Cursor sessions:', error); - return []; - } -} - - -function normalizeComparablePath(inputPath) { - if (!inputPath || typeof inputPath !== 'string') { - return ''; - } - - const withoutLongPathPrefix = inputPath.startsWith('\\\\?\\') - ? 
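Cursor's `store.db` meta table can hold hex-encoded JSON; the decode performed above, isolated with its plain-string fallback (a sketch):

// Sketch: decode a meta value that may be hex-encoded JSON, else keep raw text.
function decodeMetaValue(value) {
  const raw = value.toString();
  if (/^[0-9a-fA-F]+$/.test(raw)) {
    try {
      return JSON.parse(Buffer.from(raw, 'hex').toString('utf8'));
    } catch {
      // fall through: not valid JSON after all
    }
  }
  return raw;
}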
inputPath.slice(4) - : inputPath; - const normalized = path.normalize(withoutLongPathPrefix.trim()); - - if (!normalized) { - return ''; - } - - const resolved = path.resolve(normalized); - return process.platform === 'win32' ? resolved.toLowerCase() : resolved; -} - -async function findCodexJsonlFiles(dir) { - const files = []; - - try { - const entries = await fs.readdir(dir, { withFileTypes: true }); - for (const entry of entries) { - const fullPath = path.join(dir, entry.name); - if (entry.isDirectory()) { - files.push(...await findCodexJsonlFiles(fullPath)); - } else if (entry.name.endsWith('.jsonl')) { - files.push(fullPath); - } - } - } catch (error) { - // Skip directories we can't read - } - - return files; -} - -async function buildCodexSessionsIndex() { - const codexSessionsDir = path.join(os.homedir(), '.codex', 'sessions'); - const sessionsByProject = new Map(); - - try { - await fs.access(codexSessionsDir); - } catch (error) { - return sessionsByProject; - } - - const jsonlFiles = await findCodexJsonlFiles(codexSessionsDir); - - for (const filePath of jsonlFiles) { - try { - const sessionData = await parseCodexSessionFile(filePath); - if (!sessionData || !sessionData.id) { - continue; - } - - const normalizedProjectPath = normalizeComparablePath(sessionData.cwd); - if (!normalizedProjectPath) { - continue; - } - - const session = { - id: sessionData.id, - summary: sessionData.summary || 'Codex Session', - messageCount: sessionData.messageCount || 0, - lastActivity: sessionData.timestamp ? new Date(sessionData.timestamp) : new Date(), - cwd: sessionData.cwd, - model: sessionData.model, - filePath, - provider: 'codex', - }; - - if (!sessionsByProject.has(normalizedProjectPath)) { - sessionsByProject.set(normalizedProjectPath, []); - } - - sessionsByProject.get(normalizedProjectPath).push(session); - } catch (error) { - console.warn(`Could not parse Codex session file ${filePath}:`, error.message); - } - } - - for (const sessions of sessionsByProject.values()) { - sessions.sort((a, b) => new Date(b.lastActivity) - new Date(a.lastActivity)); - } - - return sessionsByProject; -} - -// Fetch Codex sessions for a given project path -async function getCodexSessions(projectPath, options = {}) { - const { limit = 5, indexRef = null } = options; - try { - const normalizedProjectPath = normalizeComparablePath(projectPath); - if (!normalizedProjectPath) { - return []; - } - - if (indexRef && !indexRef.sessionsByProject) { - indexRef.sessionsByProject = await buildCodexSessionsIndex(); - } - - const sessionsByProject = indexRef?.sessionsByProject || await buildCodexSessionsIndex(); - const sessions = sessionsByProject.get(normalizedProjectPath) || []; - - // Return limited sessions for performance (0 = unlimited for deletion) - return limit > 0 ? sessions.slice(0, limit) : [...sessions]; - - } catch (error) { - console.error('Error fetching Codex sessions:', error); - return []; - } -} - -function isVisibleCodexUserMessage(payload) { - if (!payload || payload.type !== 'user_message') { - return false; - } - - // Codex logs internal context (environment, instructions) as non-plain user_message kinds. 
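`normalizeComparablePath` above makes Codex cwd values comparable across platforms: trim, drop the Windows long-path prefix, resolve, and case-fold on win32. A small demonstration of the equivalences it establishes (a sketch; results depend on the host platform):

import path from 'path';

// Trailing slashes and padding disappear after trim + normalize + resolve:
const a = path.resolve(path.normalize('/home/jane/app/'.trim()));
const b = path.resolve(path.normalize('  /home/jane/app  '.trim()));
console.log(a === b); // true on POSIX hosts
// On win32 the function additionally strips a leading '\\?\' long-path prefix
// and lowercases the result, so 'C:\Work\App' and 'c:\work\app' compare equal.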
- if (payload.kind && payload.kind !== 'plain') { - return false; - } - - if (typeof payload.message !== 'string' || payload.message.trim().length === 0) { - return false; - } - - return true; -} - -// Parse a Codex session JSONL file to extract metadata -async function parseCodexSessionFile(filePath) { - try { - const fileStream = fsSync.createReadStream(filePath); - const rl = readline.createInterface({ - input: fileStream, - crlfDelay: Infinity - }); - - let sessionMeta = null; - let lastTimestamp = null; - let lastUserMessage = null; - let messageCount = 0; - - for await (const line of rl) { - if (line.trim()) { - try { - const entry = JSON.parse(line); - - // Track timestamp - if (entry.timestamp) { - lastTimestamp = entry.timestamp; - } - - // Extract session metadata - if (entry.type === 'session_meta' && entry.payload) { - sessionMeta = { - id: entry.payload.id, - cwd: entry.payload.cwd, - model: entry.payload.model || entry.payload.model_provider, - timestamp: entry.timestamp, - git: entry.payload.git - }; - } - - // Count visible user messages and extract summary from the latest plain user input. - if (entry.type === 'event_msg' && isVisibleCodexUserMessage(entry.payload)) { - messageCount++; - if (entry.payload.message) { - lastUserMessage = entry.payload.message; - } - } - - if (entry.type === 'response_item' && entry.payload?.type === 'message' && entry.payload.role === 'assistant') { - messageCount++; - } - - } catch (parseError) { - // Skip malformed lines - } - } - } - - if (sessionMeta) { - return { - ...sessionMeta, - timestamp: lastTimestamp || sessionMeta.timestamp, - summary: lastUserMessage ? - (lastUserMessage.length > 50 ? lastUserMessage.substring(0, 50) + '...' : lastUserMessage) : - 'Codex Session', - messageCount - }; - } - - return null; - - } catch (error) { - console.error('Error parsing Codex session file:', error); - return null; - } -} - -// Get messages for a specific Codex session -async function getCodexSessionMessages(sessionId, limit = null, offset = 0) { - try { - const codexSessionsDir = path.join(os.homedir(), '.codex', 'sessions'); - - // Find the session file by searching for the session ID - const findSessionFile = async (dir) => { - try { - const entries = await fs.readdir(dir, { withFileTypes: true }); - for (const entry of entries) { - const fullPath = path.join(dir, entry.name); - if (entry.isDirectory()) { - const found = await findSessionFile(fullPath); - if (found) return found; - } else if (entry.name.includes(sessionId) && entry.name.endsWith('.jsonl')) { - return fullPath; - } - } - } catch (error) { - // Skip directories we can't read - } - return null; - }; - - const sessionFilePath = await findSessionFile(codexSessionsDir); - - if (!sessionFilePath) { - console.warn(`Codex session file not found for session ${sessionId}`); - return { messages: [], total: 0, hasMore: false }; - } - - const messages = []; - let tokenUsage = null; - const fileStream = fsSync.createReadStream(sessionFilePath); - const rl = readline.createInterface({ - input: fileStream, - crlfDelay: Infinity - }); - - // Helper to extract text from Codex content array - const extractText = (content) => { - if (!Array.isArray(content)) return content; - return content - .map(item => { - if (item.type === 'input_text' || item.type === 'output_text') { - return item.text; - } - if (item.type === 'text') { - return item.text; - } - return ''; - }) - .filter(Boolean) - .join('\n'); - }; - - for await (const line of rl) { - if (line.trim()) { - try { - const entry = 
JSON.parse(line); - - // Extract token usage from token_count events (keep latest) - if (entry.type === 'event_msg' && entry.payload?.type === 'token_count' && entry.payload?.info) { - const info = entry.payload.info; - if (info.total_token_usage) { - tokenUsage = { - used: info.total_token_usage.total_tokens || 0, - total: info.model_context_window || 200000 - }; - } - } - - // Use event_msg.user_message for user-visible inputs. - if (entry.type === 'event_msg' && isVisibleCodexUserMessage(entry.payload)) { - messages.push({ - type: 'user', - timestamp: entry.timestamp, - message: { - role: 'user', - content: entry.payload.message - } - }); - } - - // response_item.message may include internal prompts for non-assistant roles. - // Keep only assistant output from response_item. - if ( - entry.type === 'response_item' && - entry.payload?.type === 'message' && - entry.payload.role === 'assistant' - ) { - const content = entry.payload.content; - const textContent = extractText(content); - - // Only add if there's actual content - if (textContent?.trim()) { - messages.push({ - type: 'assistant', - timestamp: entry.timestamp, - message: { - role: 'assistant', - content: textContent - } - }); - } - } - - if (entry.type === 'response_item' && entry.payload?.type === 'reasoning') { - const summaryText = entry.payload.summary - ?.map(s => s.text) - .filter(Boolean) - .join('\n'); - if (summaryText?.trim()) { - messages.push({ - type: 'thinking', - timestamp: entry.timestamp, - message: { - role: 'assistant', - content: summaryText - } - }); - } - } - - if (entry.type === 'response_item' && entry.payload?.type === 'function_call') { - let toolName = entry.payload.name; - let toolInput = entry.payload.arguments; - - // Map Codex tool names to Claude equivalents - if (toolName === 'shell_command') { - toolName = 'Bash'; - try { - const args = JSON.parse(entry.payload.arguments); - toolInput = JSON.stringify({ command: args.command }); - } catch (e) { - // Keep original if parsing fails - } - } - - messages.push({ - type: 'tool_use', - timestamp: entry.timestamp, - toolName: toolName, - toolInput: toolInput, - toolCallId: entry.payload.call_id - }); - } - - if (entry.type === 'response_item' && entry.payload?.type === 'function_call_output') { - messages.push({ - type: 'tool_result', - timestamp: entry.timestamp, - toolCallId: entry.payload.call_id, - output: entry.payload.output - }); - } - - if (entry.type === 'response_item' && entry.payload?.type === 'custom_tool_call') { - const toolName = entry.payload.name || 'custom_tool'; - const input = entry.payload.input || ''; - - if (toolName === 'apply_patch') { - // Parse Codex patch format and convert to Claude Edit format - const fileMatch = input.match(/\*\*\* Update File: (.+)/); - const filePath = fileMatch ? 
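The tool-name translation above lets one renderer handle both providers; the `shell_command` to `Bash` case on its own (a sketch):

// Sketch: normalize a Codex function_call payload to the Claude-style tool shape.
function mapCodexFunctionCall(payload) {
  if (payload.name === 'shell_command') {
    try {
      const args = JSON.parse(payload.arguments);
      return { toolName: 'Bash', toolInput: JSON.stringify({ command: args.command }) };
    } catch {
      // keep the original arguments when they are not valid JSON
    }
  }
  return { toolName: payload.name, toolInput: payload.arguments };
}

// mapCodexFunctionCall({ name: 'shell_command', arguments: '{"command":"ls"}' })
// -> { toolName: 'Bash', toolInput: '{"command":"ls"}' }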
fileMatch[1].trim() : 'unknown'; - - // Extract old and new content from patch - const lines = input.split('\n'); - const oldLines = []; - const newLines = []; - - for (const line of lines) { - if (line.startsWith('-') && !line.startsWith('---')) { - oldLines.push(line.substring(1)); - } else if (line.startsWith('+') && !line.startsWith('+++')) { - newLines.push(line.substring(1)); - } - } - - messages.push({ - type: 'tool_use', - timestamp: entry.timestamp, - toolName: 'Edit', - toolInput: JSON.stringify({ - file_path: filePath, - old_string: oldLines.join('\n'), - new_string: newLines.join('\n') - }), - toolCallId: entry.payload.call_id - }); - } else { - messages.push({ - type: 'tool_use', - timestamp: entry.timestamp, - toolName: toolName, - toolInput: input, - toolCallId: entry.payload.call_id - }); - } - } - - if (entry.type === 'response_item' && entry.payload?.type === 'custom_tool_call_output') { - messages.push({ - type: 'tool_result', - timestamp: entry.timestamp, - toolCallId: entry.payload.call_id, - output: entry.payload.output || '' - }); - } - - } catch (parseError) { - // Skip malformed lines - } - } - } - - // Sort by timestamp - messages.sort((a, b) => new Date(a.timestamp || 0) - new Date(b.timestamp || 0)); - - const total = messages.length; - - // Apply pagination if limit is specified - if (limit !== null) { - const startIndex = Math.max(0, total - offset - limit); - const endIndex = total - offset; - const paginatedMessages = messages.slice(startIndex, endIndex); - const hasMore = startIndex > 0; - - return { - messages: paginatedMessages, - total, - hasMore, - offset, - limit, - tokenUsage - }; - } - - return { messages, tokenUsage }; - - } catch (error) { - console.error(`Error reading Codex session messages for ${sessionId}:`, error); - return { messages: [], total: 0, hasMore: false }; - } -} - -async function deleteCodexSession(sessionId) { - try { - const codexSessionsDir = path.join(os.homedir(), '.codex', 'sessions'); - - const findJsonlFiles = async (dir) => { - const files = []; - try { - const entries = await fs.readdir(dir, { withFileTypes: true }); - for (const entry of entries) { - const fullPath = path.join(dir, entry.name); - if (entry.isDirectory()) { - files.push(...await findJsonlFiles(fullPath)); - } else if (entry.name.endsWith('.jsonl')) { - files.push(fullPath); - } - } - } catch (error) { } - return files; - }; - - const jsonlFiles = await findJsonlFiles(codexSessionsDir); - - for (const filePath of jsonlFiles) { - const sessionData = await parseCodexSessionFile(filePath); - if (sessionData && sessionData.id === sessionId) { - await fs.unlink(filePath); - return true; - } - } - - throw new Error(`Codex session file not found for session ${sessionId}`); - } catch (error) { - console.error(`Error deleting Codex session ${sessionId}:`, error); - throw error; - } -} - -async function searchConversations(query, limit = 50, onProjectResult = null, signal = null) { - const safeQuery = typeof query === 'string' ? query.trim() : ''; - const safeLimit = Math.max(1, Math.min(Number.isFinite(limit) ? 
limit : 50, 200));
-  const claudeDir = path.join(os.homedir(), '.claude', 'projects');
-  const config = await loadProjectConfig();
-  const results = [];
-  let totalMatches = 0;
-  const words = safeQuery.toLowerCase().split(/\s+/).filter(w => w.length > 0);
-  if (words.length === 0) return { results: [], totalMatches: 0, query: safeQuery };
-
-  const isAborted = () => signal?.aborted === true;
-
-  const isSystemMessage = (textContent) => {
-    return typeof textContent === 'string' && (
-      textContent.startsWith('') ||
-      textContent.startsWith('') ||
-      textContent.startsWith('') ||
-      textContent.startsWith('') ||
-      textContent.startsWith('') ||
-      textContent.startsWith('Caveat:') ||
-      textContent.startsWith('This session is being continued from a previous') ||
-      textContent.startsWith('Invalid API key') ||
-      textContent.includes('{"subtasks":') ||
-      textContent.includes('CRITICAL: You MUST respond with ONLY a JSON') ||
-      textContent === 'Warmup'
-    );
-  };
-
-  const extractText = (content) => {
-    if (typeof content === 'string') return content;
-    if (Array.isArray(content)) {
-      return content
-        .filter(part => part.type === 'text' && part.text)
-        .map(part => part.text)
-        .join(' ');
-    }
-    return '';
-  };
-
-  const escapeRegex = (s) => s.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
-  const wordPatterns = words.map(w => new RegExp(`(?<!\\w)${escapeRegex(w)}`, 'i'));
-  const allWordsMatch = (textLower) => {
-    return wordPatterns.every(p => p.test(textLower));
-  };
-
-  const buildSnippet = (text, textLower, snippetLen = 150) => {
-    let firstIndex = -1;
-    let firstWordLen = 0;
-    for (const w of words) {
-      const re = new RegExp(`(?<!\\w)${escapeRegex(w)}`, 'i');
-      const m = re.exec(textLower);
-      if (m && (firstIndex === -1 || m.index < firstIndex)) {
-        firstIndex = m.index;
-        firstWordLen = w.length;
-      }
-    }
-    if (firstIndex === -1) firstIndex = 0;
-    const start = Math.max(0, firstIndex - Math.floor((snippetLen - firstWordLen) / 2));
-    const end = Math.min(text.length, start + snippetLen);
-    let snippet = text.slice(start, end);
-    const prefix = start > 0 ? '...' : '';
-    const suffix = end < text.length ? '...' : '';
-    snippet = prefix + snippet + suffix;
-    const snippetLower = snippet.toLowerCase();
-    const highlights = [];
-    for (const word of words) {
-      const re = new RegExp(`(?<!\\w)${escapeRegex(word)}`, 'gi');
-      let m;
-      while ((m = re.exec(snippetLower)) !== null) {
-        highlights.push({ start: m.index, end: m.index + word.length });
-      }
-    }
-    highlights.sort((a, b) =>
a.start - b.start); - const merged = []; - for (const h of highlights) { - const last = merged[merged.length - 1]; - if (last && h.start <= last.end) { - last.end = Math.max(last.end, h.end); - } else { - merged.push({ ...h }); - } - } - return { snippet, highlights: merged }; - }; - - try { - await fs.access(claudeDir); - const entries = await fs.readdir(claudeDir, { withFileTypes: true }); - const projectDirs = entries.filter(e => e.isDirectory()); - let scannedProjects = 0; - const totalProjects = projectDirs.length; - - for (const projectEntry of projectDirs) { - if (totalMatches >= safeLimit || isAborted()) break; - - const projectName = projectEntry.name; - const projectDir = path.join(claudeDir, projectName); - const displayName = config[projectName]?.displayName - || await generateDisplayName(projectName); - - let files; - try { - files = await fs.readdir(projectDir); - } catch { - continue; - } - - const jsonlFiles = files.filter( - file => file.endsWith('.jsonl') && !file.startsWith('agent-') - ); - - const projectResult = { - projectName, - projectDisplayName: displayName, - sessions: [] - }; - - for (const file of jsonlFiles) { - if (totalMatches >= safeLimit || isAborted()) break; - - const filePath = path.join(projectDir, file); - const sessionMatches = new Map(); - const sessionSummaries = new Map(); - const pendingSummaries = new Map(); - const sessionLastMessages = new Map(); - let currentSessionId = null; - - try { - const fileStream = fsSync.createReadStream(filePath); - const rl = readline.createInterface({ - input: fileStream, - crlfDelay: Infinity - }); - - for await (const line of rl) { - if (totalMatches >= safeLimit || isAborted()) break; - if (!line.trim()) continue; - - let entry; - try { - entry = JSON.parse(line); - } catch { - continue; - } - - if (entry.sessionId) { - currentSessionId = entry.sessionId; - } - if (entry.type === 'summary' && entry.summary) { - const sid = entry.sessionId || currentSessionId; - if (sid) { - sessionSummaries.set(sid, entry.summary); - } else if (entry.leafUuid) { - pendingSummaries.set(entry.leafUuid, entry.summary); - } - } - - // Apply pending summary via parentUuid - if (entry.parentUuid && currentSessionId && !sessionSummaries.has(currentSessionId)) { - const pending = pendingSummaries.get(entry.parentUuid); - if (pending) sessionSummaries.set(currentSessionId, pending); - } - - // Track last user/assistant message for fallback title - if (entry.message?.content && currentSessionId && !entry.isApiErrorMessage) { - const role = entry.message.role; - if (role === 'user' || role === 'assistant') { - const text = extractText(entry.message.content); - if (text && !isSystemMessage(text)) { - if (!sessionLastMessages.has(currentSessionId)) { - sessionLastMessages.set(currentSessionId, {}); - } - const msgs = sessionLastMessages.get(currentSessionId); - if (role === 'user') msgs.user = text; - else msgs.assistant = text; - } - } - } - - if (!entry.message?.content) continue; - if (entry.message.role !== 'user' && entry.message.role !== 'assistant') continue; - if (entry.isApiErrorMessage) continue; - - const text = extractText(entry.message.content); - if (!text || isSystemMessage(text)) continue; - - const textLower = text.toLowerCase(); - if (!allWordsMatch(textLower)) continue; - - const sessionId = entry.sessionId || currentSessionId || file.replace('.jsonl', ''); - if (!sessionMatches.has(sessionId)) { - sessionMatches.set(sessionId, []); - } - - const matches = sessionMatches.get(sessionId); - if (matches.length < 2) { - const 
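After sorting, overlapping highlight ranges are merged so the UI never double-wraps a match; the merge on its own (a sketch):

// Sketch: merge sorted, possibly-overlapping { start, end } ranges.
function mergeRanges(sortedRanges) {
  const merged = [];
  for (const r of sortedRanges) {
    const last = merged[merged.length - 1];
    if (last && r.start <= last.end) {
      last.end = Math.max(last.end, r.end); // extend the previous range
    } else {
      merged.push({ ...r });
    }
  }
  return merged;
}

// mergeRanges([{ start: 0, end: 3 }, { start: 2, end: 5 }]) // [{ start: 0, end: 5 }]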
{ snippet, highlights } = buildSnippet(text, textLower); - matches.push({ - role: entry.message.role, - snippet, - highlights, - timestamp: entry.timestamp || null, - provider: 'claude', - messageUuid: entry.uuid || null - }); - totalMatches++; - } - } - } catch { - continue; - } - - for (const [sessionId, matches] of sessionMatches) { - projectResult.sessions.push({ - sessionId, - provider: 'claude', - sessionSummary: sessionSummaries.get(sessionId) || (() => { - const msgs = sessionLastMessages.get(sessionId); - const lastMsg = msgs?.user || msgs?.assistant; - return lastMsg ? (lastMsg.length > 50 ? lastMsg.substring(0, 50) + '...' : lastMsg) : 'New Session'; - })(), - matches - }); - } - } - - // Search Codex sessions for this project - try { - const actualProjectDir = await extractProjectDirectory(projectName); - if (actualProjectDir && !isAborted() && totalMatches < safeLimit) { - await searchCodexSessionsForProject( - actualProjectDir, projectResult, words, allWordsMatch, extractText, isSystemMessage, - buildSnippet, safeLimit, () => totalMatches, (n) => { totalMatches += n; }, isAborted - ); - } - } catch { - // Skip codex search errors - } - - // Search Gemini sessions for this project - try { - const actualProjectDir = await extractProjectDirectory(projectName); - if (actualProjectDir && !isAborted() && totalMatches < safeLimit) { - await searchGeminiSessionsForProject( - actualProjectDir, projectResult, words, allWordsMatch, - buildSnippet, safeLimit, () => totalMatches, (n) => { totalMatches += n; } - ); - } - } catch { - // Skip gemini search errors - } - - scannedProjects++; - if (projectResult.sessions.length > 0) { - results.push(projectResult); - if (onProjectResult) { - onProjectResult({ projectResult, totalMatches, scannedProjects, totalProjects }); - } - } else if (onProjectResult && scannedProjects % 10 === 0) { - onProjectResult({ projectResult: null, totalMatches, scannedProjects, totalProjects }); - } - } - } catch { - // claudeDir doesn't exist - } - - return { results, totalMatches, query: safeQuery }; -} - -async function searchCodexSessionsForProject( - projectPath, projectResult, words, allWordsMatch, extractText, isSystemMessage, - buildSnippet, limit, getTotalMatches, addMatches, isAborted -) { - const normalizedProjectPath = normalizeComparablePath(projectPath); - if (!normalizedProjectPath) return; - const codexSessionsDir = path.join(os.homedir(), '.codex', 'sessions'); - try { - await fs.access(codexSessionsDir); - } catch { - return; - } - - const jsonlFiles = await findCodexJsonlFiles(codexSessionsDir); - - for (const filePath of jsonlFiles) { - if (getTotalMatches() >= limit || isAborted()) break; - - try { - const fileStream = fsSync.createReadStream(filePath); - const rl = readline.createInterface({ input: fileStream, crlfDelay: Infinity }); - - // First pass: read session_meta to check project path match - let sessionMeta = null; - for await (const line of rl) { - if (!line.trim()) continue; - try { - const entry = JSON.parse(line); - if (entry.type === 'session_meta' && entry.payload) { - sessionMeta = entry.payload; - break; - } - } catch { continue; } - } - - // Skip sessions that don't belong to this project - if (!sessionMeta) continue; - const sessionProjectPath = normalizeComparablePath(sessionMeta.cwd); - if (sessionProjectPath !== normalizedProjectPath) continue; - - // Second pass: re-read file to find matching messages - const fileStream2 = fsSync.createReadStream(filePath); - const rl2 = readline.createInterface({ input: fileStream2, 
crlfDelay: Infinity }); - let lastUserMessage = null; - const matches = []; - - for await (const line of rl2) { - if (getTotalMatches() >= limit || isAborted()) break; - if (!line.trim()) continue; - - let entry; - try { entry = JSON.parse(line); } catch { continue; } - - let text = null; - let role = null; - - if (entry.type === 'event_msg' && entry.payload?.type === 'user_message' && entry.payload.message) { - text = entry.payload.message; - role = 'user'; - lastUserMessage = text; - } else if (entry.type === 'response_item' && entry.payload?.type === 'message') { - const contentParts = entry.payload.content || []; - if (entry.payload.role === 'user') { - text = contentParts - .filter(p => p.type === 'input_text' && p.text) - .map(p => p.text) - .join(' '); - role = 'user'; - if (text) lastUserMessage = text; - } else if (entry.payload.role === 'assistant') { - text = contentParts - .filter(p => p.type === 'output_text' && p.text) - .map(p => p.text) - .join(' '); - role = 'assistant'; - } - } - - if (!text || !role) continue; - const textLower = text.toLowerCase(); - if (!allWordsMatch(textLower)) continue; - - if (matches.length < 2) { - const { snippet, highlights } = buildSnippet(text, textLower); - matches.push({ role, snippet, highlights, timestamp: entry.timestamp || null, provider: 'codex' }); - addMatches(1); - } - } - - if (matches.length > 0) { - projectResult.sessions.push({ - sessionId: sessionMeta.id, - provider: 'codex', - sessionSummary: lastUserMessage - ? (lastUserMessage.length > 50 ? lastUserMessage.substring(0, 50) + '...' : lastUserMessage) - : 'Codex Session', - matches - }); - } - } catch { - continue; - } - } -} - -async function searchGeminiSessionsForProject( - projectPath, projectResult, words, allWordsMatch, - buildSnippet, limit, getTotalMatches, addMatches -) { - // 1) Search in-memory sessions (created via UI) - for (const [sessionId, session] of sessionManager.sessions) { - if (getTotalMatches() >= limit) break; - if (session.projectPath !== projectPath) continue; - - const matches = []; - for (const msg of session.messages) { - if (getTotalMatches() >= limit) break; - if (msg.role !== 'user' && msg.role !== 'assistant') continue; - - const text = typeof msg.content === 'string' ? msg.content - : Array.isArray(msg.content) ? msg.content.filter(p => p.type === 'text').map(p => p.text).join(' ') - : ''; - if (!text) continue; - - const textLower = text.toLowerCase(); - if (!allWordsMatch(textLower)) continue; - - if (matches.length < 2) { - const { snippet, highlights } = buildSnippet(text, textLower); - matches.push({ - role: msg.role, snippet, highlights, - timestamp: msg.timestamp ? msg.timestamp.toISOString() : null, - provider: 'gemini' - }); - addMatches(1); - } - } - - if (matches.length > 0) { - const firstUserMsg = session.messages.find(m => m.role === 'user'); - const summary = firstUserMsg?.content - ? (typeof firstUserMsg.content === 'string' - ? (firstUserMsg.content.length > 50 ? firstUserMsg.content.substring(0, 50) + '...' 
: firstUserMsg.content) - : 'Gemini Session') - : 'Gemini Session'; - - projectResult.sessions.push({ - sessionId, - provider: 'gemini', - sessionSummary: summary, - matches - }); - } - } - - // 2) Search Gemini CLI sessions on disk (~/.gemini/tmp//chats/*.json) - const normalizedProjectPath = normalizeComparablePath(projectPath); - if (!normalizedProjectPath) return; - - const geminiTmpDir = path.join(os.homedir(), '.gemini', 'tmp'); - try { - await fs.access(geminiTmpDir); - } catch { - return; - } - - const trackedSessionIds = new Set(); - for (const [sid] of sessionManager.sessions) { - trackedSessionIds.add(sid); - } - - let projectDirs; - try { - projectDirs = await fs.readdir(geminiTmpDir); - } catch { - return; - } - - for (const projectDir of projectDirs) { - if (getTotalMatches() >= limit) break; - - const projectRootFile = path.join(geminiTmpDir, projectDir, '.project_root'); - let projectRoot; - try { - projectRoot = (await fs.readFile(projectRootFile, 'utf8')).trim(); - } catch { - continue; - } - - if (normalizeComparablePath(projectRoot) !== normalizedProjectPath) continue; - - const chatsDir = path.join(geminiTmpDir, projectDir, 'chats'); - let chatFiles; - try { - chatFiles = await fs.readdir(chatsDir); - } catch { - continue; - } - - for (const chatFile of chatFiles) { - if (getTotalMatches() >= limit) break; - if (!chatFile.endsWith('.json')) continue; - - try { - const filePath = path.join(chatsDir, chatFile); - const data = await fs.readFile(filePath, 'utf8'); - const session = JSON.parse(data); - if (!session.messages || !Array.isArray(session.messages)) continue; - - const cliSessionId = session.sessionId || chatFile.replace('.json', ''); - if (trackedSessionIds.has(cliSessionId)) continue; - - const matches = []; - let firstUserText = null; - - for (const msg of session.messages) { - if (getTotalMatches() >= limit) break; - - const role = msg.type === 'user' ? 'user' - : (msg.type === 'gemini' || msg.type === 'assistant') ? 'assistant' - : null; - if (!role) continue; - - let text = ''; - if (typeof msg.content === 'string') { - text = msg.content; - } else if (Array.isArray(msg.content)) { - text = msg.content - .filter(p => p.text) - .map(p => p.text) - .join(' '); - } - if (!text) continue; - - if (role === 'user' && !firstUserText) firstUserText = text; - - const textLower = text.toLowerCase(); - if (!allWordsMatch(textLower)) continue; - - if (matches.length < 2) { - const { snippet, highlights } = buildSnippet(text, textLower); - matches.push({ - role, snippet, highlights, - timestamp: msg.timestamp || null, - provider: 'gemini' - }); - addMatches(1); - } - } - - if (matches.length > 0) { - const summary = firstUserText - ? (firstUserText.length > 50 ? firstUserText.substring(0, 50) + '...' 
: firstUserText) - : 'Gemini CLI Session'; - - projectResult.sessions.push({ - sessionId: cliSessionId, - provider: 'gemini', - sessionSummary: summary, - matches - }); - } - } catch { - continue; - } - } - } -} - -async function getGeminiCliSessions(projectPath) { - const normalizedProjectPath = normalizeComparablePath(projectPath); - if (!normalizedProjectPath) return []; - - const geminiTmpDir = path.join(os.homedir(), '.gemini', 'tmp'); - try { - await fs.access(geminiTmpDir); - } catch { - return []; - } - - const sessions = []; - let projectDirs; - try { - projectDirs = await fs.readdir(geminiTmpDir); - } catch { - return []; - } - - for (const projectDir of projectDirs) { - const projectRootFile = path.join(geminiTmpDir, projectDir, '.project_root'); - let projectRoot; - try { - projectRoot = (await fs.readFile(projectRootFile, 'utf8')).trim(); - } catch { - continue; - } - - if (normalizeComparablePath(projectRoot) !== normalizedProjectPath) continue; - - const chatsDir = path.join(geminiTmpDir, projectDir, 'chats'); - let chatFiles; - try { - chatFiles = await fs.readdir(chatsDir); - } catch { - continue; - } - - for (const chatFile of chatFiles) { - if (!chatFile.endsWith('.json')) continue; - try { - const filePath = path.join(chatsDir, chatFile); - const data = await fs.readFile(filePath, 'utf8'); - const session = JSON.parse(data); - if (!session.messages || !Array.isArray(session.messages)) continue; - - const sessionId = session.sessionId || chatFile.replace('.json', ''); - const firstUserMsg = session.messages.find(m => m.type === 'user'); - let summary = 'Gemini CLI Session'; - if (firstUserMsg) { - const text = Array.isArray(firstUserMsg.content) - ? firstUserMsg.content.filter(p => p.text).map(p => p.text).join(' ') - : (typeof firstUserMsg.content === 'string' ? firstUserMsg.content : ''); - if (text) { - summary = text.length > 50 ? text.substring(0, 50) + '...' : text; - } - } - - sessions.push({ - id: sessionId, - summary, - messageCount: session.messages.length, - lastActivity: session.lastUpdated || session.startTime || null, - provider: 'gemini' - }); - } catch { - continue; - } - } - } - - return sessions.sort((a, b) => - new Date(b.lastActivity || 0) - new Date(a.lastActivity || 0) - ); -} - -async function getGeminiCliSessionMessages(sessionId) { - const geminiTmpDir = path.join(os.homedir(), '.gemini', 'tmp'); - let projectDirs; - try { - projectDirs = await fs.readdir(geminiTmpDir); - } catch { - return []; - } - - for (const projectDir of projectDirs) { - const chatsDir = path.join(geminiTmpDir, projectDir, 'chats'); - let chatFiles; - try { - chatFiles = await fs.readdir(chatsDir); - } catch { - continue; - } - - for (const chatFile of chatFiles) { - if (!chatFile.endsWith('.json')) continue; - try { - const filePath = path.join(chatsDir, chatFile); - const data = await fs.readFile(filePath, 'utf8'); - const session = JSON.parse(data); - const fileSessionId = session.sessionId || chatFile.replace('.json', ''); - if (fileSessionId !== sessionId) continue; - - return (session.messages || []).map(msg => { - const role = msg.type === 'user' ? 'user' - : (msg.type === 'gemini' || msg.type === 'assistant') ? 
'assistant' - : msg.type; - - let content = ''; - if (typeof msg.content === 'string') { - content = msg.content; - } else if (Array.isArray(msg.content)) { - content = msg.content.filter(p => p.text).map(p => p.text).join('\n'); - } - - return { - type: 'message', - message: { role, content }, - timestamp: msg.timestamp || null - }; - }); - } catch { - continue; - } - } - } - - return []; -} - -export { - getProjects, - getSessions, - getSessionMessages, - parseJsonlSessions, - renameProject, - deleteSession, - isProjectEmpty, - deleteProject, - addProjectManually, - loadProjectConfig, - saveProjectConfig, - extractProjectDirectory, - clearProjectDirectoryCache, - getCodexSessions, - getCodexSessionMessages, - deleteCodexSession, - getGeminiCliSessions, - getGeminiCliSessionMessages, - searchConversations -}; diff --git a/server/routes/agent.js b/server/routes/agent.js index cdcd3a65..37a9ed26 100644 --- a/server/routes/agent.js +++ b/server/routes/agent.js @@ -4,8 +4,7 @@ import path from 'path'; import os from 'os'; import { promises as fs } from 'fs'; import crypto from 'crypto'; -import { userDb, apiKeysDb, githubTokensDb } from '../database/db.js'; -import { addProjectManually } from '../projects.js'; +import { userDb, apiKeysDb, githubTokensDb, projectsDb } from '../modules/database/index.js'; import { queryClaudeSDK } from '../claude-sdk.js'; import { spawnCursor } from '../cursor-cli.js'; import { queryCodex } from '../openai-codex.js'; @@ -13,6 +12,7 @@ import { spawnGemini } from '../gemini-cli.js'; import { Octokit } from '@octokit/rest'; import { CLAUDE_MODELS, CURSOR_MODELS, CODEX_MODELS } from '../../shared/modelConstants.js'; import { IS_PLATFORM } from '../constants/config.js'; +import { normalizeProjectPath } from '../shared/utils.js'; const router = express.Router(); @@ -890,7 +890,7 @@ router.post('/', validateExternalApiKey, async (req, res) => { finalProjectPath = await cloneGitHubRepo(githubUrl.trim(), tokenToUse, targetPath); } else { // Use existing project path - finalProjectPath = path.resolve(projectPath); + finalProjectPath = normalizeProjectPath(path.resolve(projectPath)); // Verify the path exists try { @@ -900,19 +900,14 @@ router.post('/', validateExternalApiKey, async (req, res) => { } } - // Register the project (or use existing registration) - let project; - try { - project = await addProjectManually(finalProjectPath); - console.log('📦 Project registered:', project); - } catch (error) { - // If project already exists, that's fine - continue with the existing registration - if (error.message && error.message.includes('Project already configured')) { - console.log('📦 Using existing project registration for:', finalProjectPath); - project = { path: finalProjectPath }; - } else { - throw error; - } + finalProjectPath = normalizeProjectPath(finalProjectPath); + + // Register project path in DB (or reuse existing active registration) + const registrationResult = projectsDb.createProjectPath(finalProjectPath, null); + if (registrationResult.outcome === 'active_conflict') { + console.log('Project registration already exists for:', finalProjectPath); + } else { + console.log('Project registered:', registrationResult.project); } // Set up writer based on streaming mode diff --git a/server/routes/auth.js b/server/routes/auth.js index be4c38c1..dcb2e3ff 100644 --- a/server/routes/auth.js +++ b/server/routes/auth.js @@ -1,9 +1,11 @@ import express from 'express'; import bcrypt from 'bcrypt'; -import { userDb, db } from '../database/db.js'; +import { userDb } from 
'../modules/database/index.js'; +import { getConnection } from '../modules/database/connection.js'; import { generateToken, authenticateToken } from '../middleware/auth.js'; const router = express.Router(); +const db = getConnection(); // Check auth status and setup requirements router.get('/status', async (req, res) => { @@ -132,4 +134,4 @@ router.post('/logout', authenticateToken, (req, res) => { res.json({ success: true, message: 'Logged out successfully' }); }); -export default router; \ No newline at end of file +export default router; diff --git a/server/routes/codex.js b/server/routes/codex.js deleted file mode 100644 index 06630414..00000000 --- a/server/routes/codex.js +++ /dev/null @@ -1,19 +0,0 @@ -import express from 'express'; -import { deleteCodexSession } from '../projects.js'; -import { sessionNamesDb } from '../database/db.js'; - -const router = express.Router(); - -router.delete('/sessions/:sessionId', async (req, res) => { - try { - const { sessionId } = req.params; - await deleteCodexSession(sessionId); - sessionNamesDb.deleteName(sessionId, 'codex'); - res.json({ success: true }); - } catch (error) { - console.error(`Error deleting Codex session ${req.params.sessionId}:`, error); - res.status(500).json({ success: false, error: error.message }); - } -}); - -export default router; diff --git a/server/routes/gemini.js b/server/routes/gemini.js index ff7f3663..341365b4 100644 --- a/server/routes/gemini.js +++ b/server/routes/gemini.js @@ -1,6 +1,7 @@ import express from 'express'; + import sessionManager from '../sessionManager.js'; -import { sessionNamesDb } from '../database/db.js'; +import { sessionsDb } from '../modules/database/index.js'; const router = express.Router(); @@ -13,7 +14,7 @@ router.delete('/sessions/:sessionId', async (req, res) => { } await sessionManager.deleteSession(sessionId); - sessionNamesDb.deleteName(sessionId, 'gemini'); + sessionsDb.deleteSessionById(sessionId); res.json({ success: true }); } catch (error) { console.error(`Error deleting Gemini session ${req.params.sessionId}:`, error); diff --git a/server/routes/git.js b/server/routes/git.js index a4395638..2aebdad4 100755 --- a/server/routes/git.js +++ b/server/routes/git.js @@ -2,7 +2,7 @@ import express from 'express'; import { spawn } from 'child_process'; import path from 'path'; import { promises as fs } from 'fs'; -import { extractProjectDirectory } from '../projects.js'; +import { projectsDb } from '../modules/database/index.js'; import { queryClaudeSDK } from '../claude-sdk.js'; import { spawnCursor } from '../cursor-cli.js'; @@ -101,14 +101,19 @@ function validateProjectPath(projectPath) { return resolved; } -// Helper function to get the actual project path from the encoded project name -async function getActualProjectPath(projectName) { - let projectPath; - try { - projectPath = await extractProjectDirectory(projectName); - } catch (error) { - console.error(`Error extracting project directory for ${projectName}:`, error); - throw new Error(`Unable to resolve project path for "${projectName}"`); +/** + * Resolve the absolute project directory for a given DB `projectId`. + * + * After the projectName → projectId migration, every git endpoint receives + * the DB primary key (`project` query/body param). The legacy filesystem + * resolver that walked Claude's JSONL history is no longer used here; the + * path comes straight from the `projects` table and is then sanity-checked + * by `validateProjectPath` before any `git` command runs against it. 
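+ *
+ * A hypothetical resolution flow (the id `42` and the example path are
+ * illustrative only, not taken from this changeset):
+ *
+ *   GET /api/git/status?project=42
+ *     → getActualProjectPath('42')
+ *     → projectsDb.getProjectPathById('42')      // e.g. '/home/user/my-app'
+ *     → validateProjectPath('/home/user/my-app') // sanity check before git runs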
+ */ +async function getActualProjectPath(projectId) { + const projectPath = await projectsDb.getProjectPathById(projectId); + if (!projectPath) { + throw new Error(`Unable to resolve project path for "${projectId}"`); } return validateProjectPath(projectPath); } @@ -292,7 +297,7 @@ router.get('/status', async (req, res) => { const { project } = req.query; if (!project) { - return res.status(400).json({ error: 'Project name is required' }); + return res.status(400).json({ error: 'Project id is required' }); } try { @@ -355,7 +360,7 @@ router.get('/diff', async (req, res) => { const { project, file } = req.query; if (!project || !file) { - return res.status(400).json({ error: 'Project name and file path are required' }); + return res.status(400).json({ error: 'Project id and file path are required' }); } try { @@ -438,7 +443,7 @@ router.get('/file-with-diff', async (req, res) => { const { project, file } = req.query; if (!project || !file) { - return res.status(400).json({ error: 'Project name and file path are required' }); + return res.status(400).json({ error: 'Project id and file path are required' }); } try { @@ -518,7 +523,7 @@ router.post('/initial-commit', async (req, res) => { const { project } = req.body; if (!project) { - return res.status(400).json({ error: 'Project name is required' }); + return res.status(400).json({ error: 'Project id is required' }); } try { @@ -593,7 +598,7 @@ router.post('/revert-local-commit', async (req, res) => { const { project } = req.body; if (!project) { - return res.status(400).json({ error: 'Project name is required' }); + return res.status(400).json({ error: 'Project id is required' }); } try { @@ -640,7 +645,7 @@ router.get('/branches', async (req, res) => { const { project } = req.query; if (!project) { - return res.status(400).json({ error: 'Project name is required' }); + return res.status(400).json({ error: 'Project id is required' }); } try { @@ -684,7 +689,7 @@ router.post('/checkout', async (req, res) => { const { project, branch } = req.body; if (!project || !branch) { - return res.status(400).json({ error: 'Project name and branch are required' }); + return res.status(400).json({ error: 'Project id and branch are required' }); } try { @@ -706,7 +711,7 @@ router.post('/create-branch', async (req, res) => { const { project, branch } = req.body; if (!project || !branch) { - return res.status(400).json({ error: 'Project name and branch name are required' }); + return res.status(400).json({ error: 'Project id and branch name are required' }); } try { @@ -728,7 +733,7 @@ router.post('/delete-branch', async (req, res) => { const { project, branch } = req.body; if (!project || !branch) { - return res.status(400).json({ error: 'Project name and branch name are required' }); + return res.status(400).json({ error: 'Project id and branch name are required' }); } try { @@ -754,7 +759,7 @@ router.get('/commits', async (req, res) => { const { project, limit = 10 } = req.query; if (!project) { - return res.status(400).json({ error: 'Project name is required' }); + return res.status(400).json({ error: 'Project id is required' }); } try { @@ -811,7 +816,7 @@ router.get('/commit-diff', async (req, res) => { const { project, commit } = req.query; if (!project || !commit) { - return res.status(400).json({ error: 'Project name and commit hash are required' }); + return res.status(400).json({ error: 'Project id and commit hash are required' }); } try { @@ -843,7 +848,7 @@ router.post('/generate-commit-message', async (req, res) => { const { project, files, 
provider = 'claude' } = req.body; if (!project || !files || files.length === 0) { - return res.status(400).json({ error: 'Project name and files are required' }); + return res.status(400).json({ error: 'Project id and files are required' }); } // Validate provider @@ -1048,7 +1053,7 @@ router.get('/remote-status', async (req, res) => { const { project } = req.query; if (!project) { - return res.status(400).json({ error: 'Project name is required' }); + return res.status(400).json({ error: 'Project id is required' }); } try { @@ -1126,7 +1131,7 @@ router.post('/fetch', async (req, res) => { const { project } = req.body; if (!project) { - return res.status(400).json({ error: 'Project name is required' }); + return res.status(400).json({ error: 'Project id is required' }); } try { @@ -1167,7 +1172,7 @@ router.post('/pull', async (req, res) => { const { project } = req.body; if (!project) { - return res.status(400).json({ error: 'Project name is required' }); + return res.status(400).json({ error: 'Project id is required' }); } try { @@ -1235,7 +1240,7 @@ router.post('/push', async (req, res) => { const { project } = req.body; if (!project) { - return res.status(400).json({ error: 'Project name is required' }); + return res.status(400).json({ error: 'Project id is required' }); } try { @@ -1306,7 +1311,7 @@ router.post('/publish', async (req, res) => { const { project, branch } = req.body; if (!project || !branch) { - return res.status(400).json({ error: 'Project name and branch are required' }); + return res.status(400).json({ error: 'Project id and branch are required' }); } try { @@ -1385,7 +1390,7 @@ router.post('/discard', async (req, res) => { const { project, file } = req.body; if (!project || !file) { - return res.status(400).json({ error: 'Project name and file path are required' }); + return res.status(400).json({ error: 'Project id and file path are required' }); } try { @@ -1439,7 +1444,7 @@ router.post('/delete-untracked', async (req, res) => { const { project, file } = req.body; if (!project || !file) { - return res.status(400).json({ error: 'Project name and file path are required' }); + return res.status(400).json({ error: 'Project id and file path are required' }); } try { diff --git a/server/routes/messages.js b/server/routes/messages.js deleted file mode 100644 index 81444d56..00000000 --- a/server/routes/messages.js +++ /dev/null @@ -1,61 +0,0 @@ -/** - * Unified messages endpoint. - * - * GET /api/sessions/:sessionId/messages?provider=claude&projectName=foo&limit=50&offset=0 - * - * Replaces the four provider-specific session message endpoints with a single route - * that delegates to the appropriate adapter via the provider registry. 
- * - * @module routes/messages - */ - -import express from 'express'; -import { sessionsService } from '../modules/providers/services/sessions.service.js'; - -const router = express.Router(); - -/** - * GET /api/sessions/:sessionId/messages - * - * Auth: authenticateToken applied at mount level in index.js - * - * Query params: - * provider - 'claude' | 'cursor' | 'codex' | 'gemini' (default: 'claude') - * projectName - required for claude provider - * projectPath - required for cursor provider (absolute path used for cwdId hash) - * limit - page size (omit or null for all) - * offset - pagination offset (default: 0) - */ -router.get('/:sessionId/messages', async (req, res) => { - try { - const { sessionId } = req.params; - const provider = String(req.query.provider || 'claude').trim().toLowerCase(); - const projectName = req.query.projectName || ''; - const projectPath = req.query.projectPath || ''; - const limitParam = req.query.limit; - const limit = limitParam !== undefined && limitParam !== null && limitParam !== '' - ? parseInt(limitParam, 10) - : null; - const offset = parseInt(req.query.offset || '0', 10); - - const availableProviders = sessionsService.listProviderIds(); - if (!availableProviders.includes(provider)) { - const available = availableProviders.join(', '); - return res.status(400).json({ error: `Unknown provider: ${provider}. Available: ${available}` }); - } - - const result = await sessionsService.fetchHistory(provider, sessionId, { - projectName, - projectPath, - limit, - offset, - }); - - return res.json(result); - } catch (error) { - console.error('Error fetching unified messages:', error); - return res.status(500).json({ error: 'Failed to fetch messages' }); - } -}); - -export default router; diff --git a/server/routes/projects.js b/server/routes/projects.js deleted file mode 100644 index cf3a62e4..00000000 --- a/server/routes/projects.js +++ /dev/null @@ -1,548 +0,0 @@ -import express from 'express'; -import { promises as fs } from 'fs'; -import path from 'path'; -import { spawn } from 'child_process'; -import os from 'os'; -import { addProjectManually } from '../projects.js'; - -const router = express.Router(); - -function sanitizeGitError(message, token) { - if (!message || !token) return message; - return message.replace(new RegExp(token.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'), 'g'), '***'); -} - -// Configure allowed workspace root (defaults to user's home directory) -export const WORKSPACES_ROOT = process.env.WORKSPACES_ROOT || os.homedir(); - -// System-critical paths that should never be used as workspace directories -export const FORBIDDEN_PATHS = [ - // Unix - '/', - '/etc', - '/bin', - '/sbin', - '/usr', - '/dev', - '/proc', - '/sys', - '/var', - '/boot', - '/root', - '/lib', - '/lib64', - '/opt', - '/tmp', - '/run', - // Windows - 'C:\\Windows', - 'C:\\Program Files', - 'C:\\Program Files (x86)', - 'C:\\ProgramData', - 'C:\\System Volume Information', - 'C:\\$Recycle.Bin' -]; - -/** - * Validates that a path is safe for workspace operations - * @param {string} requestedPath - The path to validate - * @returns {Promise<{valid: boolean, resolvedPath?: string, error?: string}>} - */ -export async function validateWorkspacePath(requestedPath) { - try { - // Resolve to absolute path - let absolutePath = path.resolve(requestedPath); - - // Check if path is a forbidden system directory - const normalizedPath = path.normalize(absolutePath); - if (FORBIDDEN_PATHS.includes(normalizedPath) || normalizedPath === '/') { - return { - valid: false, - error: 'Cannot 
use system-critical directories as workspace locations' - }; - } - - // Additional check for paths starting with forbidden directories - for (const forbidden of FORBIDDEN_PATHS) { - if (normalizedPath === forbidden || - normalizedPath.startsWith(forbidden + path.sep)) { - // Exception: /var/tmp and similar user-accessible paths might be allowed - // but /var itself and most /var subdirectories should be blocked - if (forbidden === '/var' && - (normalizedPath.startsWith('/var/tmp') || - normalizedPath.startsWith('/var/folders'))) { - continue; // Allow these specific cases - } - - return { - valid: false, - error: `Cannot create workspace in system directory: ${forbidden}` - }; - } - } - - // Try to resolve the real path (following symlinks) - let realPath; - try { - // Check if path exists to resolve real path - await fs.access(absolutePath); - realPath = await fs.realpath(absolutePath); - } catch (error) { - if (error.code === 'ENOENT') { - // Path doesn't exist yet - check parent directory - let parentPath = path.dirname(absolutePath); - try { - const parentRealPath = await fs.realpath(parentPath); - - // Reconstruct the full path with real parent - realPath = path.join(parentRealPath, path.basename(absolutePath)); - } catch (parentError) { - if (parentError.code === 'ENOENT') { - // Parent doesn't exist either - use the absolute path as-is - // We'll validate it's within allowed root - realPath = absolutePath; - } else { - throw parentError; - } - } - } else { - throw error; - } - } - - // Resolve the workspace root to its real path - const resolvedWorkspaceRoot = await fs.realpath(WORKSPACES_ROOT); - - // Ensure the resolved path is contained within the allowed workspace root - if (!realPath.startsWith(resolvedWorkspaceRoot + path.sep) && - realPath !== resolvedWorkspaceRoot) { - return { - valid: false, - error: `Workspace path must be within the allowed workspace root: ${WORKSPACES_ROOT}` - }; - } - - // Additional symlink check for existing paths - try { - await fs.access(absolutePath); - const stats = await fs.lstat(absolutePath); - - if (stats.isSymbolicLink()) { - // Verify symlink target is also within allowed root - const linkTarget = await fs.readlink(absolutePath); - const resolvedTarget = path.resolve(path.dirname(absolutePath), linkTarget); - const realTarget = await fs.realpath(resolvedTarget); - - if (!realTarget.startsWith(resolvedWorkspaceRoot + path.sep) && - realTarget !== resolvedWorkspaceRoot) { - return { - valid: false, - error: 'Symlink target is outside the allowed workspace root' - }; - } - } - } catch (error) { - if (error.code !== 'ENOENT') { - throw error; - } - // Path doesn't exist - that's fine for new workspace creation - } - - return { - valid: true, - resolvedPath: realPath - }; - - } catch (error) { - return { - valid: false, - error: `Path validation failed: ${error.message}` - }; - } -} - -/** - * Create a new workspace - * POST /api/projects/create-workspace - * - * Body: - * - workspaceType: 'existing' | 'new' - * - path: string (workspace path) - * - githubUrl?: string (optional, for new workspaces) - * - githubTokenId?: number (optional, ID of stored token) - * - newGithubToken?: string (optional, one-time token) - */ -router.post('/create-workspace', async (req, res) => { - try { - const { workspaceType, path: workspacePath, githubUrl, githubTokenId, newGithubToken } = req.body; - - // Validate required fields - if (!workspaceType || !workspacePath) { - return res.status(400).json({ error: 'workspaceType and path are required' }); - } - - if 
(!['existing', 'new'].includes(workspaceType)) { - return res.status(400).json({ error: 'workspaceType must be "existing" or "new"' }); - } - - // Validate path safety before any operations - const validation = await validateWorkspacePath(workspacePath); - if (!validation.valid) { - return res.status(400).json({ - error: 'Invalid workspace path', - details: validation.error - }); - } - - const absolutePath = validation.resolvedPath; - - // Handle existing workspace - if (workspaceType === 'existing') { - // Check if the path exists - try { - await fs.access(absolutePath); - const stats = await fs.stat(absolutePath); - - if (!stats.isDirectory()) { - return res.status(400).json({ error: 'Path exists but is not a directory' }); - } - } catch (error) { - if (error.code === 'ENOENT') { - return res.status(404).json({ error: 'Workspace path does not exist' }); - } - throw error; - } - - // Add the existing workspace to the project list - const project = await addProjectManually(absolutePath); - - return res.json({ - success: true, - project, - message: 'Existing workspace added successfully' - }); - } - - // Handle new workspace creation - if (workspaceType === 'new') { - // Create the directory if it doesn't exist - await fs.mkdir(absolutePath, { recursive: true }); - - // If GitHub URL is provided, clone the repository - if (githubUrl) { - let githubToken = null; - - // Get GitHub token if needed - if (githubTokenId) { - // Fetch token from database - const token = await getGithubTokenById(githubTokenId, req.user.id); - if (!token) { - // Clean up created directory - await fs.rm(absolutePath, { recursive: true, force: true }); - return res.status(404).json({ error: 'GitHub token not found' }); - } - githubToken = token.github_token; - } else if (newGithubToken) { - githubToken = newGithubToken; - } - - // Extract repo name from URL for the clone destination - const normalizedUrl = githubUrl.replace(/\/+$/, '').replace(/\.git$/, ''); - const repoName = normalizedUrl.split('/').pop() || 'repository'; - const clonePath = path.join(absolutePath, repoName); - - // Check if clone destination already exists to prevent data loss - try { - await fs.access(clonePath); - return res.status(409).json({ - error: 'Directory already exists', - details: `The destination path "${clonePath}" already exists. 
Please choose a different location or remove the existing directory.` - }); - } catch (err) { - // Directory doesn't exist, which is what we want - } - - // Clone the repository into a subfolder - try { - await cloneGitHubRepository(githubUrl, clonePath, githubToken); - } catch (error) { - // Only clean up if clone created partial data (check if dir exists and is empty or partial) - try { - const stats = await fs.stat(clonePath); - if (stats.isDirectory()) { - await fs.rm(clonePath, { recursive: true, force: true }); - } - } catch (cleanupError) { - // Directory doesn't exist or cleanup failed - ignore - } - throw new Error(`Failed to clone repository: ${error.message}`); - } - - // Add the cloned repo path to the project list - const project = await addProjectManually(clonePath); - - return res.json({ - success: true, - project, - message: 'New workspace created and repository cloned successfully' - }); - } - - // Add the new workspace to the project list (no clone) - const project = await addProjectManually(absolutePath); - - return res.json({ - success: true, - project, - message: 'New workspace created successfully' - }); - } - - } catch (error) { - console.error('Error creating workspace:', error); - res.status(500).json({ - error: error.message || 'Failed to create workspace', - details: process.env.NODE_ENV === 'development' ? error.stack : undefined - }); - } -}); - -/** - * Helper function to get GitHub token from database - */ -async function getGithubTokenById(tokenId, userId) { - const { db } = await import('../database/db.js'); - - const credential = db.prepare( - 'SELECT * FROM user_credentials WHERE id = ? AND user_id = ? AND credential_type = ? AND is_active = 1' - ).get(tokenId, userId, 'github_token'); - - // Return in the expected format (github_token field for compatibility) - if (credential) { - return { - ...credential, - github_token: credential.credential_value - }; - } - - return null; -} - -/** - * Clone repository with progress streaming (SSE) - * GET /api/projects/clone-progress - */ -router.get('/clone-progress', async (req, res) => { - const { path: workspacePath, githubUrl, githubTokenId, newGithubToken } = req.query; - - res.setHeader('Content-Type', 'text/event-stream'); - res.setHeader('Cache-Control', 'no-cache'); - res.setHeader('Connection', 'keep-alive'); - res.flushHeaders(); - - const sendEvent = (type, data) => { - res.write(`data: ${JSON.stringify({ type, ...data })}\n\n`); - }; - - try { - if (!workspacePath || !githubUrl) { - sendEvent('error', { message: 'workspacePath and githubUrl are required' }); - res.end(); - return; - } - - const validation = await validateWorkspacePath(workspacePath); - if (!validation.valid) { - sendEvent('error', { message: validation.error }); - res.end(); - return; - } - - const absolutePath = validation.resolvedPath; - - await fs.mkdir(absolutePath, { recursive: true }); - - let githubToken = null; - if (githubTokenId) { - const token = await getGithubTokenById(parseInt(githubTokenId), req.user.id); - if (!token) { - await fs.rm(absolutePath, { recursive: true, force: true }); - sendEvent('error', { message: 'GitHub token not found' }); - res.end(); - return; - } - githubToken = token.github_token; - } else if (newGithubToken) { - githubToken = newGithubToken; - } - - const normalizedUrl = githubUrl.replace(/\/+$/, '').replace(/\.git$/, ''); - const repoName = normalizedUrl.split('/').pop() || 'repository'; - const clonePath = path.join(absolutePath, repoName); - - // Check if clone destination already exists to 
prevent data loss - try { - await fs.access(clonePath); - sendEvent('error', { message: `Directory "${repoName}" already exists. Please choose a different location or remove the existing directory.` }); - res.end(); - return; - } catch (err) { - // Directory doesn't exist, which is what we want - } - - let cloneUrl = githubUrl; - if (githubToken) { - try { - const url = new URL(githubUrl); - url.username = githubToken; - url.password = ''; - cloneUrl = url.toString(); - } catch (error) { - // SSH URL or invalid - use as-is - } - } - - sendEvent('progress', { message: `Cloning into '${repoName}'...` }); - - const gitProcess = spawn('git', ['clone', '--progress', cloneUrl, clonePath], { - stdio: ['ignore', 'pipe', 'pipe'], - env: { - ...process.env, - GIT_TERMINAL_PROMPT: '0' - } - }); - - let lastError = ''; - - gitProcess.stdout.on('data', (data) => { - const message = data.toString().trim(); - if (message) { - sendEvent('progress', { message }); - } - }); - - gitProcess.stderr.on('data', (data) => { - const message = data.toString().trim(); - lastError = message; - if (message) { - sendEvent('progress', { message }); - } - }); - - gitProcess.on('close', async (code) => { - if (code === 0) { - try { - const project = await addProjectManually(clonePath); - sendEvent('complete', { project, message: 'Repository cloned successfully' }); - } catch (error) { - sendEvent('error', { message: `Clone succeeded but failed to add project: ${error.message}` }); - } - } else { - const sanitizedError = sanitizeGitError(lastError, githubToken); - let errorMessage = 'Git clone failed'; - if (lastError.includes('Authentication failed') || lastError.includes('could not read Username')) { - errorMessage = 'Authentication failed. Please check your credentials.'; - } else if (lastError.includes('Repository not found')) { - errorMessage = 'Repository not found. 
Please check the URL and ensure you have access.'; - } else if (lastError.includes('already exists')) { - errorMessage = 'Directory already exists'; - } else if (sanitizedError) { - errorMessage = sanitizedError; - } - try { - await fs.rm(clonePath, { recursive: true, force: true }); - } catch (cleanupError) { - console.error('Failed to clean up after clone failure:', sanitizeGitError(cleanupError.message, githubToken)); - } - sendEvent('error', { message: errorMessage }); - } - res.end(); - }); - - gitProcess.on('error', (error) => { - if (error.code === 'ENOENT') { - sendEvent('error', { message: 'Git is not installed or not in PATH' }); - } else { - sendEvent('error', { message: error.message }); - } - res.end(); - }); - - req.on('close', () => { - gitProcess.kill(); - }); - - } catch (error) { - sendEvent('error', { message: error.message }); - res.end(); - } -}); - -/** - * Helper function to clone a GitHub repository - */ -function cloneGitHubRepository(githubUrl, destinationPath, githubToken = null) { - return new Promise((resolve, reject) => { - let cloneUrl = githubUrl; - - if (githubToken) { - try { - const url = new URL(githubUrl); - url.username = githubToken; - url.password = ''; - cloneUrl = url.toString(); - } catch (error) { - // SSH URL - use as-is - } - } - - const gitProcess = spawn('git', ['clone', '--progress', cloneUrl, destinationPath], { - stdio: ['ignore', 'pipe', 'pipe'], - env: { - ...process.env, - GIT_TERMINAL_PROMPT: '0' - } - }); - - let stdout = ''; - let stderr = ''; - - gitProcess.stdout.on('data', (data) => { - stdout += data.toString(); - }); - - gitProcess.stderr.on('data', (data) => { - stderr += data.toString(); - }); - - gitProcess.on('close', (code) => { - if (code === 0) { - resolve({ stdout, stderr }); - } else { - let errorMessage = 'Git clone failed'; - - if (stderr.includes('Authentication failed') || stderr.includes('could not read Username')) { - errorMessage = 'Authentication failed. Please check your GitHub token.'; - } else if (stderr.includes('Repository not found')) { - errorMessage = 'Repository not found. 
Please check the URL and ensure you have access.'; - } else if (stderr.includes('already exists')) { - errorMessage = 'Directory already exists'; - } else if (stderr) { - errorMessage = stderr; - } - - reject(new Error(errorMessage)); - } - }); - - gitProcess.on('error', (error) => { - if (error.code === 'ENOENT') { - reject(new Error('Git is not installed or not in PATH')); - } else { - reject(error); - } - }); - }); -} - -export default router; diff --git a/server/routes/settings.js b/server/routes/settings.js index e2ce0885..d467c49c 100644 --- a/server/routes/settings.js +++ b/server/routes/settings.js @@ -1,5 +1,5 @@ import express from 'express'; -import { apiKeysDb, credentialsDb, notificationPreferencesDb, pushSubscriptionsDb } from '../database/db.js'; +import { apiKeysDb, credentialsDb, notificationPreferencesDb, pushSubscriptionsDb } from '../modules/database/index.js'; import { getPublicKey } from '../services/vapid-keys.js'; import { createNotificationEvent, notifyUserIfEnabled } from '../services/notification-orchestrator.js'; diff --git a/server/routes/taskmaster.js b/server/routes/taskmaster.js index 54f7153a..01a8d801 100644 --- a/server/routes/taskmaster.js +++ b/server/routes/taskmaster.js @@ -13,10 +13,25 @@ import fs from 'fs'; import path from 'path'; import { promises as fsPromises } from 'fs'; import { spawn } from 'child_process'; -import { extractProjectDirectory } from '../projects.js'; +import { projectsDb } from '../modules/database/index.js'; import { detectTaskMasterMCPServer } from '../utils/mcp-detector.js'; import { broadcastTaskMasterProjectUpdate, broadcastTaskMasterTasksUpdate } from '../utils/taskmaster-websocket.js'; +/** + * Resolve the absolute project directory from a DB-assigned `projectId`. + * + * TaskMaster routes used to accept a Claude-encoded folder name (`projectName`) + * and derive the path from JSONL history. After the projectId migration the + * only identifier we accept is the primary key of the `projects` table, so + * every handler calls this helper and 404s when the id is unknown. + */ +async function resolveProjectPathFromId(projectId) { + if (!projectId) { + return null; + } + return projectsDb.getProjectPathById(projectId); +} + const router = express.Router(); /** @@ -132,21 +147,22 @@ router.get('/installation-status', async (req, res) => { }); /** - * GET /api/taskmaster/tasks/:projectName + * GET /api/taskmaster/tasks/:projectId * Load actual tasks from .taskmaster/tasks/tasks.json + * + * `projectId` is the DB primary key of the project; the folder is resolved via + * the projects table rather than extracted from Claude JSONL history. */ -router.get('/tasks/:projectName', async (req, res) => { +router.get('/tasks/:projectId', async (req, res) => { try { - const { projectName } = req.params; - - // Get project path - let projectPath; - try { - projectPath = await extractProjectDirectory(projectName); - } catch (error) { + const { projectId } = req.params; + + // Get project path via the DB; the legacy JSONL-based resolver is gone. 
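+    // e.g. GET /api/taskmaster/tasks/42 (id illustrative): an unknown id makes
+    // resolveProjectPathFromId return null, which falls through to the 404 below.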
+ const projectPath = await resolveProjectPathFromId(projectId); + if (!projectPath) { return res.status(404).json({ error: 'Project not found', - message: `Project "${projectName}" does not exist` + message: `Project "${projectId}" does not exist` }); } @@ -158,7 +174,7 @@ router.get('/tasks/:projectName', async (req, res) => { await fsPromises.access(tasksFilePath); } catch (error) { return res.json({ - projectName, + projectId, tasks: [], message: 'No tasks.json file found' }); @@ -213,7 +229,7 @@ router.get('/tasks/:projectName', async (req, res) => { })); res.json({ - projectName, + projectId, projectPath, tasks: transformedTasks, currentTag, @@ -247,21 +263,19 @@ router.get('/tasks/:projectName', async (req, res) => { }); /** - * GET /api/taskmaster/prd/:projectName + * GET /api/taskmaster/prd/:projectId * List all PRD files in the project's .taskmaster/docs directory */ -router.get('/prd/:projectName', async (req, res) => { +router.get('/prd/:projectId', async (req, res) => { try { - const { projectName } = req.params; - - // Get project path - let projectPath; - try { - projectPath = await extractProjectDirectory(projectName); - } catch (error) { + const { projectId } = req.params; + + // projectId → projectPath lookup through the DB (post-migration). + const projectPath = await resolveProjectPathFromId(projectId); + if (!projectPath) { return res.status(404).json({ error: 'Project not found', - message: `Project "${projectName}" does not exist` + message: `Project "${projectId}" does not exist` }); } @@ -272,7 +286,7 @@ router.get('/prd/:projectName', async (req, res) => { await fsPromises.access(docsPath, fs.constants.R_OK); } catch (error) { return res.json({ - projectName, + projectId, prdFiles: [], message: 'No .taskmaster/docs directory found' }); @@ -299,7 +313,7 @@ router.get('/prd/:projectName', async (req, res) => { } res.json({ - projectName, + projectId, projectPath, prdFiles: prdFiles.sort((a, b) => new Date(b.modified) - new Date(a.modified)), timestamp: new Date().toISOString() @@ -323,12 +337,12 @@ router.get('/prd/:projectName', async (req, res) => { }); /** - * POST /api/taskmaster/prd/:projectName + * POST /api/taskmaster/prd/:projectId * Create or update a PRD file in the project's .taskmaster/docs directory */ -router.post('/prd/:projectName', async (req, res) => { +router.post('/prd/:projectId', async (req, res) => { try { - const { projectName } = req.params; + const { projectId } = req.params; const { fileName, content } = req.body; if (!fileName || !content) { @@ -346,14 +360,12 @@ router.post('/prd/:projectName', async (req, res) => { }); } - // Get project path - let projectPath; - try { - projectPath = await extractProjectDirectory(projectName); - } catch (error) { + // Resolve the project folder through the DB using the projectId param. 
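+    // Hypothetical request shape: POST /api/taskmaster/prd/42 with a JSON body
+    // of { "fileName": "prd.txt", "content": "..." } writes the file beneath the
+    // resolved project's .taskmaster/docs directory.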
+ const projectPath = await resolveProjectPathFromId(projectId); + if (!projectPath) { return res.status(404).json({ error: 'Project not found', - message: `Project "${projectName}" does not exist` + message: `Project "${projectId}" does not exist` }); } @@ -379,7 +391,7 @@ router.post('/prd/:projectName', async (req, res) => { const stats = await fsPromises.stat(filePath); res.json({ - projectName, + projectId, projectPath, fileName, filePath: path.relative(projectPath, filePath), @@ -408,21 +420,18 @@ router.post('/prd/:projectName', async (req, res) => { }); /** - * GET /api/taskmaster/prd/:projectName/:fileName + * GET /api/taskmaster/prd/:projectId/:fileName * Get content of a specific PRD file */ -router.get('/prd/:projectName/:fileName', async (req, res) => { +router.get('/prd/:projectId/:fileName', async (req, res) => { try { - const { projectName, fileName } = req.params; - - // Get project path - let projectPath; - try { - projectPath = await extractProjectDirectory(projectName); - } catch (error) { + const { projectId, fileName } = req.params; + + const projectPath = await resolveProjectPathFromId(projectId); + if (!projectPath) { return res.status(404).json({ error: 'Project not found', - message: `Project "${projectName}" does not exist` + message: `Project "${projectId}" does not exist` }); } @@ -444,7 +453,7 @@ router.get('/prd/:projectName/:fileName', async (req, res) => { const stats = await fsPromises.stat(filePath); res.json({ - projectName, + projectId, projectPath, fileName, filePath: path.relative(projectPath, filePath), @@ -473,21 +482,18 @@ router.get('/prd/:projectName/:fileName', async (req, res) => { }); /** - * POST /api/taskmaster/init/:projectName + * POST /api/taskmaster/init/:projectId * Initialize TaskMaster in a project */ -router.post('/init/:projectName', async (req, res) => { +router.post('/init/:projectId', async (req, res) => { try { - const { projectName } = req.params; - - // Get project path - let projectPath; - try { - projectPath = await extractProjectDirectory(projectName); - } catch (error) { + const { projectId } = req.params; + + const projectPath = await resolveProjectPathFromId(projectId); + if (!projectPath) { return res.status(404).json({ error: 'Project not found', - message: `Project "${projectName}" does not exist` + message: `Project "${projectId}" does not exist` }); } @@ -522,17 +528,19 @@ router.post('/init/:projectName', async (req, res) => { initProcess.on('close', (code) => { if (code === 0) { - // Broadcast TaskMaster project update via WebSocket + // Broadcast TaskMaster project update via WebSocket. The + // WebSocket payload keeps using `projectId` so the frontend + // can match notifications against the current selection. 
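+        // Client-side sketch (hypothetical handler, not part of this diff):
+        //   if (update.projectId === selectedProjectId) refreshTaskMasterState();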
if (req.app.locals.wss) { broadcastTaskMasterProjectUpdate( - req.app.locals.wss, - projectName, + req.app.locals.wss, + projectId, { hasTaskmaster: true, status: 'initialized' } ); } res.json({ - projectName, + projectId, projectPath, message: 'TaskMaster initialized successfully', output: stdout, @@ -562,12 +570,12 @@ router.post('/init/:projectName', async (req, res) => { }); /** - * POST /api/taskmaster/add-task/:projectName + * POST /api/taskmaster/add-task/:projectId * Add a new task to the project */ -router.post('/add-task/:projectName', async (req, res) => { +router.post('/add-task/:projectId', async (req, res) => { try { - const { projectName } = req.params; + const { projectId } = req.params; const { prompt, title, description, priority = 'medium', dependencies } = req.body; if (!prompt && (!title || !description)) { @@ -576,15 +584,12 @@ router.post('/add-task/:projectName', async (req, res) => { message: 'Either "prompt" or both "title" and "description" are required' }); } - - // Get project path - let projectPath; - try { - projectPath = await extractProjectDirectory(projectName); - } catch (error) { + + const projectPath = await resolveProjectPathFromId(projectId); + if (!projectPath) { return res.status(404).json({ error: 'Project not found', - message: `Project "${projectName}" does not exist` + message: `Project "${projectId}" does not exist` }); } @@ -629,16 +634,17 @@ router.post('/add-task/:projectName', async (req, res) => { console.log('Stderr:', stderr); if (code === 0) { - // Broadcast task update via WebSocket + // Broadcast task update via WebSocket using the projectId so + // clients subscribed to this project get notified immediately. if (req.app.locals.wss) { broadcastTaskMasterTasksUpdate( - req.app.locals.wss, - projectName + req.app.locals.wss, + projectId ); } res.json({ - projectName, + projectId, projectPath, message: 'Task added successfully', output: stdout, @@ -666,22 +672,19 @@ router.post('/add-task/:projectName', async (req, res) => { }); /** - * PUT /api/taskmaster/update-task/:projectName/:taskId + * PUT /api/taskmaster/update-task/:projectId/:taskId * Update a specific task using TaskMaster CLI */ -router.put('/update-task/:projectName/:taskId', async (req, res) => { +router.put('/update-task/:projectId/:taskId', async (req, res) => { try { - const { projectName, taskId } = req.params; + const { projectId, taskId } = req.params; const { title, description, status, priority, details } = req.body; - - // Get project path - let projectPath; - try { - projectPath = await extractProjectDirectory(projectName); - } catch (error) { + + const projectPath = await resolveProjectPathFromId(projectId); + if (!projectPath) { return res.status(404).json({ error: 'Project not found', - message: `Project "${projectName}" does not exist` + message: `Project "${projectId}" does not exist` }); } @@ -707,11 +710,11 @@ router.put('/update-task/:projectName/:taskId', async (req, res) => { if (code === 0) { // Broadcast task update via WebSocket if (req.app.locals.wss) { - broadcastTaskMasterTasksUpdate(req.app.locals.wss, projectName); + broadcastTaskMasterTasksUpdate(req.app.locals.wss, projectId); } res.json({ - projectName, + projectId, projectPath, taskId, message: 'Task status updated successfully', @@ -759,11 +762,11 @@ router.put('/update-task/:projectName/:taskId', async (req, res) => { if (code === 0) { // Broadcast task update via WebSocket if (req.app.locals.wss) { - broadcastTaskMasterTasksUpdate(req.app.locals.wss, projectName); + 
broadcastTaskMasterTasksUpdate(req.app.locals.wss, projectId); } res.json({ - projectName, + projectId, projectPath, taskId, message: 'Task updated successfully', @@ -793,22 +796,19 @@ router.put('/update-task/:projectName/:taskId', async (req, res) => { }); /** - * POST /api/taskmaster/parse-prd/:projectName + * POST /api/taskmaster/parse-prd/:projectId * Parse a PRD file to generate tasks */ -router.post('/parse-prd/:projectName', async (req, res) => { +router.post('/parse-prd/:projectId', async (req, res) => { try { - const { projectName } = req.params; + const { projectId } = req.params; const { fileName = 'prd.txt', numTasks, append = false } = req.body; - - // Get project path - let projectPath; - try { - projectPath = await extractProjectDirectory(projectName); - } catch (error) { + + const projectPath = await resolveProjectPathFromId(projectId); + if (!projectPath) { return res.status(404).json({ error: 'Project not found', - message: `Project "${projectName}" does not exist` + message: `Project "${projectId}" does not exist` }); } @@ -859,13 +859,13 @@ router.post('/parse-prd/:projectName', async (req, res) => { // Broadcast task update via WebSocket if (req.app.locals.wss) { broadcastTaskMasterTasksUpdate( - req.app.locals.wss, - projectName + req.app.locals.wss, + projectId ); } res.json({ - projectName, + projectId, projectPath, prdFile: fileName, message: 'PRD parsed and tasks generated successfully', @@ -1340,12 +1340,12 @@ Description of the business problem, data sources, and expected insights. }); /** - * POST /api/taskmaster/apply-template/:projectName + * POST /api/taskmaster/apply-template/:projectId * Apply a PRD template to create a new PRD file */ -router.post('/apply-template/:projectName', async (req, res) => { +router.post('/apply-template/:projectId', async (req, res) => { try { - const { projectName } = req.params; + const { projectId } = req.params; const { templateId, fileName = 'prd.txt', customizations = {} } = req.body; if (!templateId) { @@ -1355,14 +1355,11 @@ router.post('/apply-template/:projectName', async (req, res) => { }); } - // Get project path - let projectPath; - try { - projectPath = await extractProjectDirectory(projectName); - } catch (error) { + const projectPath = await resolveProjectPathFromId(projectId); + if (!projectPath) { return res.status(404).json({ error: 'Project not found', - message: `Project "${projectName}" does not exist` + message: `Project "${projectId}" does not exist` }); } @@ -1401,7 +1398,7 @@ router.post('/apply-template/:projectName', async (req, res) => { await fsPromises.writeFile(filePath, content, 'utf8'); res.json({ - projectName, + projectId, projectPath, templateId, templateName: template.name, diff --git a/server/routes/user.js b/server/routes/user.js index 877cd45b..dcb8ecd7 100644 --- a/server/routes/user.js +++ b/server/routes/user.js @@ -1,5 +1,5 @@ import express from 'express'; -import { userDb } from '../database/db.js'; +import { userDb } from '../modules/database/index.js'; import { authenticateToken } from '../middleware/auth.js'; import { getSystemGitConfig } from '../utils/gitConfig.js'; import { spawn } from 'child_process'; diff --git a/server/services/notification-orchestrator.js b/server/services/notification-orchestrator.js index d3d47dd6..43a7d058 100644 --- a/server/services/notification-orchestrator.js +++ b/server/services/notification-orchestrator.js @@ -1,5 +1,6 @@ import webPush from 'web-push'; -import { notificationPreferencesDb, pushSubscriptionsDb, sessionNamesDb } from 
'../database/db.js';
+
+import { notificationPreferencesDb, pushSubscriptionsDb, sessionsDb } from '../modules/database/index.js';
 
 const KIND_TO_PREF_KEY = {
   action_required: 'actionRequired',
@@ -107,7 +108,7 @@ function resolveSessionName(event) {
     return null;
   }
 
-  return normalizeSessionName(sessionNamesDb.getName(event.sessionId, event.provider));
+  return normalizeSessionName(sessionsDb.getSessionName(event.sessionId, event.provider));
 }
 
 function buildPushBody(event) {
diff --git a/server/services/vapid-keys.js b/server/services/vapid-keys.js
index 1abaeba1..8fce37e0 100644
--- a/server/services/vapid-keys.js
+++ b/server/services/vapid-keys.js
@@ -1,7 +1,8 @@
 import webPush from 'web-push';
-import { db } from '../database/db.js';
+import { getConnection } from '../modules/database/connection.js';
 
 let cachedKeys = null;
+const db = getConnection();
 
 function ensureVapidKeys() {
   if (cachedKeys) return cachedKeys;
diff --git a/server/shared/interfaces.ts b/server/shared/interfaces.ts
index 954b38a3..c5354dda 100644
--- a/server/shared/interfaces.ts
+++ b/server/shared/interfaces.ts
@@ -9,6 +9,7 @@ import type {
   UpsertProviderMcpServerInput,
 } from '@/shared/types.js';
 
+//----------------- PROVIDER CONTRACT INTERFACES ------------
 /**
  * Main provider contract for CLI and SDK integrations.
  *
@@ -20,11 +21,16 @@ export interface IProvider {
   readonly mcp: IProviderMcp;
   readonly auth: IProviderAuth;
   readonly sessions: IProviderSessions;
+  readonly sessionSynchronizer: IProviderSessionSynchronizer;
 }
-
+// ---------------------------
+//----------------- PROVIDER AUTH INTERFACE ------------
 /**
  * Auth contract for one provider.
+ *
+ * Implementations should return a complete installation/authentication status
+ * without throwing for normal "not installed" or "not authenticated" states.
  */
 export interface IProviderAuth {
   /**
@@ -33,8 +39,13 @@
   getStatus(): Promise;
 }
 
+// ---------------------------
+//----------------- PROVIDER MCP INTERFACE ------------
 /**
  * MCP contract for one provider.
+ *
+ * Implementations must map provider-native MCP config formats to shared
+ * `ProviderMcpServer` records used by routes and frontend state.
  */
 export interface IProviderMcp {
   listServers(options?: { workspacePath?: string }): Promise<Array<ProviderMcpServer>>;
@@ -45,10 +56,37 @@
   ): Promise<{ removed: boolean; provider: LLMProvider; name: string; scope: McpScope }>;
 }
 
+// ---------------------------
+//----------------- PROVIDER SESSION INTERFACE ------------
 /**
  * Session/history contract for one provider.
+ *
+ * Implementations normalize provider-specific events and message history into
+ * shared transport shapes consumed by API routes and realtime streams.
  */
 export interface IProviderSessions {
   normalizeMessage(raw: unknown, sessionId: string | null): NormalizedMessage[];
   fetchHistory(sessionId: string, options?: FetchHistoryOptions): Promise<FetchHistoryResult>;
 }
+
+// ---------------------------
+//----------------- PROVIDER SESSION SYNCHRONIZER INTERFACE ------------
+/**
+ * Session indexing contract for one provider.
+ *
+ * Implementations scan provider-specific session artifacts on disk and upsert
+ * normalized session metadata into the database. The service layer uses this
+ * interface for both full rescans and single-file incremental sync triggered
+ * by filesystem watcher events.
+ */
+export interface IProviderSessionSynchronizer {
+  /**
+   * Scans provider session artifacts and upserts discovered sessions into DB.
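+   *
+   * e.g. `synchronize()` for a full rescan, or, assuming `since` bounds the
+   * scan by artifact modification time, `synchronize(new Date(Date.now() - 60_000))`
+   * to pick up only files touched in the last minute.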
+   */
+  synchronize(since?: Date): Promise<void>;
+
+  /**
+   * Parses and upserts one provider artifact file without running a full scan.
+   */
+  synchronizeFile(filePath: string): Promise<void>;
+}
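
For illustration, a minimal synchronizer shaped by this contract might look like the sketch below. `claudeSessionsDir`, `parseSessionArtifact`, and `sessionsDb.upsertSession` are assumed names for this example, not code from this diff; only the interface and the two utils helpers come from the changes in this PR.

    import type { IProviderSessionSynchronizer } from '@/shared/interfaces.js';
    import { findFilesRecursivelyCreatedAfter, readFileTimestamps } from '@/shared/utils.js';

    declare const claudeSessionsDir: string;
    declare function parseSessionArtifact(filePath: string): Promise<{ sessionId: string } | null>;
    declare const sessionsDb: { upsertSession(row: Record<string, unknown>): void };

    export class ClaudeSessionSynchronizer implements IProviderSessionSynchronizer {
      // Full rescan: list every .jsonl artifact (optionally only those created
      // after `since`) and reuse the single-file path for each hit.
      async synchronize(since?: Date): Promise<void> {
        const files = await findFilesRecursivelyCreatedAfter(claudeSessionsDir, '.jsonl', since ?? null);
        for (const filePath of files) {
          await this.synchronizeFile(filePath);
        }
      }

      // Incremental path used by filesystem watcher events: parse one artifact
      // and upsert its session metadata without a full scan.
      async synchronizeFile(filePath: string): Promise<void> {
        const session = await parseSessionArtifact(filePath);
        if (!session) return;
        const { createdAt, updatedAt } = await readFileTimestamps(filePath);
        sessionsDb.upsertSession({ session_id: session.sessionId, created_at: createdAt, updated_at: updatedAt });
      }
    }
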
diff --git a/server/shared/types.ts b/server/shared/types.ts
index 7fe545c5..d15f69e7 100644
--- a/server/shared/types.ts
+++ b/server/shared/types.ts
@@ -1,18 +1,77 @@
-// -------------- HTTP API response shapes for the server, shared across modules --------------
+import type { IncomingMessage } from 'node:http';
+
+//----------------- HTTP RESPONSE SHAPES ------------
+/**
+ * Canonical success envelope used by backend APIs that return a structured payload.
+ *
+ * Use this for route handlers that need a stable `success/data` shape so frontend
+ * consumers can parse responses consistently across endpoints.
+ */
 export type ApiSuccessShape<TData> = {
   success: true;
   data: TData;
 };

+/**
+ * Generic plain-object record used when parsing loosely typed JSON payloads.
+ *
+ * Use this only after runtime shape checks, not as a replacement for validated
+ * domain models.
+ */
 export type AnyRecord = Record<string, unknown>;

-// ---------------------------------------------------------------------------------------------
+// ---------------------------
+//----------------- WEBSOCKET TRANSPORT TYPES ------------
+/**
+ * Minimal websocket client contract used by backend broadcaster services.
+ *
+ * Any transport object added to `connectedClients` must implement these two
+ * members so shared services can safely send JSON strings and check whether the
+ * socket is still open before broadcasting.
+ */
+export type RealtimeClientConnection = {
+  readyState: number;
+  send(data: string): void;
+};

+/**
+ * Authenticated user payload attached to websocket upgrade requests.
+ *
+ * Platform and OSS auth flows currently use either `id` or `userId`; both are
+ * represented here so websocket handlers can resolve a stable writer user id.
+ */
+export type AuthenticatedWebSocketUser = {
+  id?: string | number;
+  userId?: string | number;
+  username?: string;
+  [key: string]: unknown;
+};
+
+/**
+ * HTTP upgrade request shape after websocket authentication succeeds.
+ *
+ * `verifyClient` populates `request.user` with the authenticated payload, and
+ * downstream websocket handlers rely on this extended request type.
+ */
+export type AuthenticatedWebSocketRequest = IncomingMessage & {
+  user?: AuthenticatedWebSocketUser;
+};
+
+// ---------------------------
+//----------------- PROVIDER MESSAGE MODEL ------------
+/**
+ * Providers supported by the unified server runtime.
+ *
+ * Use this as the source of truth whenever a function or payload needs to identify
+ * a specific LLM integration.
+ */
 export type LLMProvider = 'claude' | 'codex' | 'gemini' | 'cursor';

-// ---------------------------------------------------------------------------------------------
-
+/**
+ * Message/event variants emitted by provider adapters and normalized transports.
+ *
+ * Keep this union in sync with event kinds produced by provider session adapters.
+ */
 export type MessageKind =
   | 'text'
   | 'tool_use'
@@ -30,11 +89,10 @@ export type MessageKind =
   | 'task_notification';

 /**
- * Provider-neutral message event emitted over REST and realtime transports.
+ * Provider-neutral message envelope used in REST responses and realtime channels.
  *
- * Providers all produce their own native SDK/CLI event shapes, so this type keeps
- * the common envelope strict while allowing provider-specific details to ride
- * along as optional properties.
+ * Every provider-specific message must be converted into this shape before being
+ * emitted outside provider-specific modules.
  */
 export type NormalizedMessage = {
   id: string;
@@ -73,21 +131,21 @@
 };

 /**
- * Pagination and provider lookup options for reading persisted session history.
+ * Shared options used to fetch historical provider messages.
+ *
+ * Consumers should pass provider-specific lookup hints (`projectPath`) only
+ * when the selected provider requires them.
  */
 export type FetchHistoryOptions = {
-  /** Claude project folder name. Required by Claude history lookup. */
-  projectName?: string;
-  /** Absolute workspace path. Required by Cursor to compute its chat hash. */
   projectPath?: string;
-  /** Page size. `null` means all messages. */
   limit?: number | null;
-  /** Pagination offset from the newest messages. */
   offset?: number;
 };

 /**
- * Provider-neutral history result returned by the unified messages endpoint.
+ * Standardized response payload returned from provider history readers.
+ *
+ * Use this as the contract for APIs that return paginated conversation history.
  */
 export type FetchHistoryResult = {
   messages: NormalizedMessage[];
@@ -98,21 +156,40 @@
   tokenUsage?: unknown;
 };

-// ---------------------------------------------------------------------------------------------
-
+// ---------------------------
+//----------------- SHARED ERROR TYPES ------------
+/**
+ * Optional metadata used when constructing application-level errors.
+ *
+ * `statusCode` should reflect the HTTP response status, while `code` identifies
+ * the stable machine-readable error category.
+ */
 export type AppErrorOptions = {
   code?: string;
   statusCode?: number;
   details?: unknown;
 };

-// -------------------- MCP related shared types --------------------
+// ---------------------------
+//----------------- MCP TYPES ------------
+/**
+ * Scope where an MCP server definition is stored and resolved.
+ *
+ * `user` is global for a user account, `local` is provider-local, and `project`
+ * is tied to a specific project path.
+ */
 export type McpScope = 'user' | 'local' | 'project';

+/**
+ * Transport protocol used by an MCP server definition.
+ */
 export type McpTransport = 'stdio' | 'http' | 'sse';

 /**
- * Provider MCP server descriptor normalized for frontend consumption.
+ * Normalized MCP server model exposed to frontend and route handlers.
+ *
+ * Provider adapters should map provider-native config to this structure before
+ * returning results.
  */
 export type ProviderMcpServer = {
   provider: LLMProvider;
@@ -131,7 +208,10 @@
 };

 /**
- * Shared payload shape for MCP server create/update operations.
+ * Payload for create/update MCP server operations.
+ *
+ * Routes and services should accept this type, validate it, and then persist it
+ * through provider-specific MCP repositories.
  */
 export type UpsertProviderMcpServerInput = {
   name: string;
@@ -149,18 +229,13 @@ export type UpsertProviderMcpServerInput = {
   envHttpHeaders?: Record<string, string>;
 };
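
As a sketch of the mapping the MCP doc comments above describe, a provider adapter might convert one native config entry like this. The `NativeMcpEntry` input shape and the chosen defaults are assumptions for illustration; the full field list of `ProviderMcpServer` is elided in this hunk, hence the cast.

    import type { ProviderMcpServer } from '@/shared/types.js';

    // Illustrative provider-native entry; real adapters read shapes like this
    // out of the provider's own MCP config file.
    type NativeMcpEntry = {
      command?: string;
      args?: string[];
      env?: Record<string, string>;
      url?: string;
    };

    function toProviderMcpServer(name: string, raw: NativeMcpEntry): ProviderMcpServer {
      const mapped = {
        provider: 'claude' as const,
        name,
        scope: 'user' as const,
        transport: raw.url ? ('http' as const) : ('stdio' as const),
        command: raw.command,
        args: raw.args ?? [],
        env: raw.env ?? {},
        url: raw.url,
      };
      // The remaining ProviderMcpServer fields are not shown in this hunk, so
      // the cast stands in for whatever the real type additionally requires.
      return mapped as ProviderMcpServer;
    }
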

-// ---------------------------------------------------------------------------------------------
-
-// -------------------- Provider auth status types --------------------
+// ---------------------------
+//----------------- PROVIDER AUTH TYPES ------------
 /**
- * Result of a provider status check (installation + authentication).
+ * Authentication status result returned by provider health checks.
  *
- * installed - Whether the provider's CLI/SDK is available
- * provider - Provider id the status belongs to
- * authenticated - Whether valid credentials exist
- * email - User email or auth method identifier
- * method - Auth method (e.g. 'api_key', 'credentials_file')
- * [error] - Error message if not installed or not authenticated
+ * This shape is consumed by settings/status endpoints to report installation and
+ * credential state for each provider.
  */
 export type ProviderAuthStatus = {
   installed: boolean;
@@ -170,3 +245,83 @@
   method: string | null;
   error?: string;
 };
+
+// ---------------------------
+//----------------- SHARED DATABASE CREDENTIAL TYPES ------------
+/**
+ * Safe credential view returned by credential listing APIs.
+ *
+ * This intentionally excludes the raw credential secret while still exposing
+ * metadata needed for UI rendering and management operations.
+ */
+export type CredentialPublicRow = {
+  id: number;
+  credential_name: string;
+  credential_type: string;
+  description: string | null;
+  created_at: string;
+  is_active: number;
+};
+
+/**
+ * Result returned after creating a credential record.
+ *
+ * Use this return shape when callers need the created id and display metadata,
+ * but must never receive the stored secret value.
+ */
+export type CreateCredentialResult = {
+  id: number | bigint;
+  credentialName: string;
+  credentialType: string;
+};
+
+// ---------------------------
+//----------------- PROJECT PERSISTENCE TYPES ------------
+/**
+ * Canonical project row shape returned by the projects repository.
+ *
+ * Use this type whenever backend services need to pass around one database
+ * project record without leaking raw SQL row typing across modules.
+ */
+export type ProjectRepositoryRow = {
+  project_id: string;
+  project_path: string;
+  custom_project_name: string | null;
+  isStarred: number;
+  isArchived: number;
+};
+
+/**
+ * Result category returned by `projectsDb.createProjectPath`.
+ *
+ * `created` means a fresh row was inserted, `reactivated_archived` means an
+ * existing archived path was re-activated and updated, and `active_conflict`
+ * means an already-active path blocked project creation.
+ */
+export type CreateProjectPathOutcome =
+  | 'created'
+  | 'reactivated_archived'
+  | 'active_conflict';
+
+/**
+ * Structured result returned by project-path upsert operations.
+ *
+ * Services should use this result to decide whether a request succeeded,
+ * should return a conflict, or needs follow-up retrieval of row metadata.
+ */
+export type CreateProjectPathResult = {
+  outcome: CreateProjectPathOutcome;
+  project: ProjectRepositoryRow | null;
+};
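
To make the outcome contract concrete, here is how a service might branch on it. `projectsDb.createProjectPath` is named in the doc comment above, but the surrounding service code and the `AppError` constructor shape (message plus options) are assumptions for this sketch.

    import { AppError } from '@/shared/utils.js';
    import type { CreateProjectPathResult } from '@/shared/types.js';

    declare const projectsDb: { createProjectPath(projectPath: string): CreateProjectPathResult };

    function registerProjectPath(projectPath: string) {
      const result = projectsDb.createProjectPath(projectPath);
      switch (result.outcome) {
        case 'created':
        case 'reactivated_archived':
          // Both outcomes leave an active row behind; hand it to the route layer.
          return result.project;
        case 'active_conflict':
          // An active project already owns this path, so surface a 409.
          throw new AppError('Project path already in use', {
            code: 'PROJECT_PATH_CONFLICT',
            statusCode: 409,
          });
      }
    }
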
+
+/**
+ * Validation result for user-supplied workspace/project paths.
+ *
+ * `resolvedPath` is present only when validation succeeds. `error` is present
+ * only when validation fails and is suitable for user-facing diagnostics.
+ */
+export type WorkspacePathValidationResult = {
+  valid: boolean;
+  resolvedPath?: string;
+  error?: string;
+};
diff --git a/server/shared/utils.ts b/server/shared/utils.ts
index de6aed56..84a382c3 100644
--- a/server/shared/utils.ts
+++ b/server/shared/utils.ts
@@ -1,7 +1,19 @@
-
 import { randomUUID } from 'node:crypto';
-import { mkdir, readFile, writeFile } from 'node:fs/promises';
+import fs from 'node:fs';
+import {
+  access,
+  lstat,
+  mkdir,
+  readFile,
+  readdir,
+  readlink,
+  realpath,
+  stat,
+  writeFile,
+} from 'node:fs/promises';
+import os from 'node:os';
 import path from 'node:path';
+import readline from 'node:readline';

 import type { NextFunction, Request, RequestHandler, Response } from 'express';

@@ -10,8 +22,17 @@ import type {
   ApiSuccessShape,
   AppErrorOptions,
   NormalizedMessage,
+  WorkspacePathValidationResult,
 } from '@/shared/types.js';

+//----------------- NORMALIZED MESSAGE HELPER INPUT TYPES ------------
+/**
+ * Input payload accepted by `createNormalizedMessage`.
+ *
+ * Callers provide provider-specific fields plus the required `kind/provider`
+ * pair; this helper fills missing envelope fields (`id`, `sessionId`,
+ * `timestamp`) in a consistent way.
+ */
 type NormalizedMessageInput = {
   kind: NormalizedMessage['kind'];
@@ -21,6 +42,14 @@
   timestamp?: string | null;
 } & Record<string, unknown>;

+// ---------------------------
+//----------------- HTTP HANDLER UTILITIES ------------
+/**
+ * Wraps arbitrary data in the standard API success envelope.
+ *
+ * Use this helper in route handlers to keep successful JSON responses consistent
+ * across endpoints.
+ */
 export function createApiSuccessResponse<TData>(
   data: TData,
 ): ApiSuccessShape<TData> {
   return {
@@ -30,6 +59,12 @@
   };
 }

+/**
+ * Converts an async Express handler into a standard `RequestHandler` and routes
+ * rejected promises to Express error middleware.
+ *
+ * Use this to avoid repeating `try/catch(next)` in every async route.
+ */
 export function asyncHandler(
   handler: (req: Request, res: Response, next: NextFunction) => Promise<unknown>
 ): RequestHandler {
   return (req, res, next) => {
@@ -38,7 +73,14 @@
   };
 }

-// --------- Global app error class for consistent error handling across the server ---------
+// ---------------------------
+//----------------- SHARED ERROR UTILITIES ------------
+/**
+ * Shared application error with HTTP status and machine-readable code metadata.
+ *
+ * Throw this from service/route layers when the caller should receive a
+ * controlled error response rather than a generic 500.
+ */
 export class AppError extends Error {
   readonly code: string;
   readonly statusCode: number;
@@ -53,9 +95,226 @@
   }
 }
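
Tying the three helpers above together, a route might read like the following sketch. The `/api/example/:id` route and `loadThing` loader are hypothetical, and the `AppError` constructor shape (message plus `AppErrorOptions`) is an assumption consistent with the types in this diff.

    import express from 'express';
    import { AppError, asyncHandler, createApiSuccessResponse } from '@/shared/utils.js';

    declare function loadThing(id: string): Promise<Record<string, unknown> | null>;

    const router = express.Router();

    router.get(
      '/api/example/:id',
      asyncHandler(async (req, res) => {
        const thing = await loadThing(req.params.id);
        if (!thing) {
          // asyncHandler routes this rejection into Express error middleware.
          throw new AppError('Thing not found', { code: 'NOT_FOUND', statusCode: 404 });
        }
        // Success responses share the `{ success: true, data }` envelope.
        res.json(createApiSuccessResponse(thing));
      }),
    );
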

-// -------------------------------------------------------------------------------------------
+// ---------------------------
+//----------------- WORKSPACE PATH VALIDATION UTILITIES ------------
+/**
+ * Root directory that all workspace/project paths must stay under.
+ *
+ * This is resolved from `WORKSPACES_ROOT` when configured; otherwise it falls
+ * back to the current user's home directory.
+ */
+export const WORKSPACES_ROOT = process.env.WORKSPACES_ROOT || os.homedir();

-// ------------------------ Normalized provider message helpers ------------------------
+/**
+ * System-critical paths that must never be used as workspace roots.
+ *
+ * The validation helper blocks these values directly and also blocks paths
+ * nested under them (with explicit allow-list exceptions where necessary).
+ */
+export const FORBIDDEN_WORKSPACE_PATHS = [
+  // Unix
+  '/',
+  '/etc',
+  '/bin',
+  '/sbin',
+  '/usr',
+  '/dev',
+  '/proc',
+  '/sys',
+  '/var',
+  '/boot',
+  '/root',
+  '/lib',
+  '/lib64',
+  '/opt',
+  '/tmp',
+  '/run',
+  // Windows
+  'C:\\Windows',
+  'C:\\Program Files',
+  'C:\\Program Files (x86)',
+  'C:\\ProgramData',
+  'C:\\System Volume Information',
+  'C:\\$Recycle.Bin',
+];
+
+function stripWindowsLongPathPrefix(inputPath: string): string {
+  if (inputPath.startsWith('\\\\?\\UNC\\')) {
+    return `\\\\${inputPath.slice('\\\\?\\UNC\\'.length)}`;
+  }
+
+  if (inputPath.startsWith('\\\\?\\')) {
+    return inputPath.slice('\\\\?\\'.length);
+  }
+
+  return inputPath;
+}
+
+function shouldUseWindowsPathNormalization(inputPath: string): boolean {
+  if (process.platform === 'win32') {
+    return true;
+  }
+
+  return inputPath.startsWith('\\\\') || /^[a-zA-Z]:([\\/]|$)/.test(inputPath);
+}
+
+/**
+ * Canonicalizes project/workspace paths for stable DB keys and comparisons.
+ *
+ * Normalization rules:
+ * - trim whitespace
+ * - strip Windows long-path prefixes (`\\?\` and `\\?\UNC\`)
+ * - normalize path separators and dot segments
+ * - trim trailing separators except for filesystem roots
+ */
+export function normalizeProjectPath(inputPath: string): string {
+  if (typeof inputPath !== 'string') {
+    return '';
+  }
+
+  const trimmed = inputPath.trim();
+  if (!trimmed) {
+    return '';
+  }
+
+  const withoutLongPrefix = stripWindowsLongPathPrefix(trimmed);
+  const useWindowsPathRules = shouldUseWindowsPathNormalization(withoutLongPrefix);
+  const normalized = useWindowsPathRules
+    ? path.win32.normalize(withoutLongPrefix)
+    : path.posix.normalize(withoutLongPrefix);
+
+  if (!normalized) {
+    return '';
+  }
+
+  const parser = useWindowsPathRules ? path.win32 : path.posix;
+  const root = parser.parse(normalized).root;
+  if (normalized === root) {
+    return normalized;
+  }
+
+  return normalized.replace(/[\\/]+$/, '');
+}
+
+/**
+ * Validates that a user-supplied workspace path is safe to use.
+ *
+ * Call this before any filesystem mutation that creates or registers projects.
+ * The function resolves symlinks, enforces `WORKSPACES_ROOT` containment, and
+ * blocks known system directories.
+ */
+export async function validateWorkspacePath(requestedPath: string): Promise<WorkspacePathValidationResult> {
+  try {
+    const normalizedRequestedPath = normalizeProjectPath(requestedPath);
+    if (!normalizedRequestedPath) {
+      return {
+        valid: false,
+        error: 'Workspace path is required',
+      };
+    }
+
+    const absolutePath = path.resolve(normalizedRequestedPath);
+    const normalizedPath = normalizeProjectPath(absolutePath);
+
+    if (FORBIDDEN_WORKSPACE_PATHS.includes(normalizedPath) || normalizedPath === '/') {
+      return {
+        valid: false,
+        error: 'Cannot use system-critical directories as workspace locations',
+      };
+    }
+
+    for (const forbiddenPath of FORBIDDEN_WORKSPACE_PATHS) {
+      const normalizedForbiddenPath = normalizeProjectPath(forbiddenPath);
+      if (
+        normalizedPath === normalizedForbiddenPath
+        || normalizedPath.startsWith(`${normalizedForbiddenPath}${path.sep}`)
+      ) {
+        // Allow specific user-writable folders under /var.
+        if (
+          normalizedForbiddenPath === '/var'
+          && (normalizedPath.startsWith('/var/tmp') || normalizedPath.startsWith('/var/folders'))
+        ) {
+          continue;
+        }
+
+        return {
+          valid: false,
+          error: `Cannot create workspace in system directory: ${forbiddenPath}`,
+        };
+      }
+    }
+
+    let resolvedPath = normalizeProjectPath(absolutePath);
+    try {
+      await access(absolutePath);
+      resolvedPath = normalizeProjectPath(await realpath(absolutePath));
+    } catch (error) {
+      const fileError = error as NodeJS.ErrnoException;
+      if (fileError.code !== 'ENOENT') {
+        throw fileError;
+      }
+
+      const parentPath = path.dirname(absolutePath);
+      try {
+        const parentRealPath = await realpath(parentPath);
+        resolvedPath = normalizeProjectPath(path.join(parentRealPath, path.basename(absolutePath)));
+      } catch (parentError) {
+        const parentFileError = parentError as NodeJS.ErrnoException;
+        if (parentFileError.code !== 'ENOENT') {
+          throw parentFileError;
+        }
+      }
+    }
+
+    const resolvedWorkspaceRoot = normalizeProjectPath(await realpath(WORKSPACES_ROOT));
+    if (
+      !resolvedPath.startsWith(`${resolvedWorkspaceRoot}${path.sep}`)
+      && resolvedPath !== resolvedWorkspaceRoot
+    ) {
+      return {
+        valid: false,
+        error: `Workspace path must be within the allowed workspace root: ${WORKSPACES_ROOT}`,
+      };
+    }
+
+    try {
+      await access(absolutePath);
+      const pathStats = await lstat(absolutePath);
+      if (pathStats.isSymbolicLink()) {
+        const symlinkTarget = await readlink(absolutePath);
+        const resolvedSymlinkPath = path.resolve(path.dirname(absolutePath), symlinkTarget);
+        const realSymlinkPath = await realpath(resolvedSymlinkPath);
+        if (
+          !realSymlinkPath.startsWith(`${resolvedWorkspaceRoot}${path.sep}`)
+          && realSymlinkPath !== resolvedWorkspaceRoot
+        ) {
+          return {
+            valid: false,
+            error: 'Symlink target is outside the allowed workspace root',
+          };
+        }
+      }
+    } catch (error) {
+      const fileError = error as NodeJS.ErrnoException;
+      if (fileError.code !== 'ENOENT') {
+        throw fileError;
+      }
+    }
+
+    return {
+      valid: true,
+      resolvedPath,
+    };
+  } catch (error) {
+    return {
+      valid: false,
+      error: `Path validation failed: ${(error as Error).message}`,
+    };
+  }
+}
+
+// ---------------------------
+//----------------- NORMALIZED PROVIDER MESSAGE UTILITIES ------------
 /**
  * Generates a stable unique id for normalized provider messages.
  */
@@ -80,9 +339,8 @@ export function createNormalizedMessage(fields: NormalizedMessageInput): NormalizedMessage {
   };
 }

-// -------------------------------------------------------------------------------------------
-
-// ------------------------ The following are mainly for provider MCP runtimes ------------------------
+// ---------------------------
+//----------------- MCP CONFIG PARSING UTILITIES ------------
 /**
  * Safely narrows an unknown value to a plain object record.
  *
@@ -154,6 +412,62 @@ export const readStringRecord = (value: unknown): Record<string, string> | undefined => {
   return Object.keys(normalized).length > 0 ? normalized : undefined;
 };
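
A brief usage sketch for the message helper documented above: the caller supplies the required `kind`/`provider` pair plus any provider-specific extras, and `createNormalizedMessage` fills `id`, `sessionId`, and `timestamp` when they are omitted. The field values here are illustrative.

    import { createNormalizedMessage } from '@/shared/utils.js';

    // Turn one raw provider event into the shared NormalizedMessage envelope.
    const message = createNormalizedMessage({
      kind: 'text',
      provider: 'claude',
      sessionId: 'abc123',
      content: 'Hello from the provider', // provider-specific extra field
    });
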
+
+// ---------------------------
+//----------------- WEBSOCKET PAYLOAD PARSING UTILITIES ------------
+/**
+ * Parses one websocket message payload into a plain JSON object record.
+ *
+ * Use this in realtime handlers that receive raw websocket payloads as `string`,
+ * `Buffer`, `ArrayBuffer`, or chunk arrays. The helper converts supported
+ * payload formats to UTF-8 text, parses JSON, and returns only object payloads.
+ * Primitive/array/invalid payloads return `null` so callers can handle bad input
+ * without throwing from deeply nested message handlers.
+ */
+export const parseIncomingJsonObject = (payload: unknown): AnyRecord | null => {
+  let text: string | null = null;
+
+  if (typeof payload === 'string') {
+    text = payload;
+  } else if (Buffer.isBuffer(payload)) {
+    text = payload.toString('utf8');
+  } else if (payload instanceof ArrayBuffer) {
+    text = Buffer.from(payload).toString('utf8');
+  } else if (Array.isArray(payload)) {
+    const buffers = payload
+      .map((entry) => {
+        if (Buffer.isBuffer(entry)) {
+          return entry;
+        }
+
+        if (entry instanceof ArrayBuffer) {
+          return Buffer.from(entry);
+        }
+
+        if (ArrayBuffer.isView(entry)) {
+          return Buffer.from(entry.buffer, entry.byteOffset, entry.byteLength);
+        }
+
+        return null;
+      })
+      .filter((entry): entry is Buffer => entry !== null);
+
+    if (buffers.length > 0) {
+      text = Buffer.concat(buffers).toString('utf8');
+    }
+  }
+
+  if (typeof text !== 'string' || text.trim().length === 0) {
+    return null;
+  }
+
+  try {
+    const parsed = JSON.parse(text) as unknown;
+    return readObjectRecord(parsed);
+  } catch {
+    return null;
+  }
+};
+
 /**
  * Reads a JSON config file and guarantees a plain object result.
  *
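
The intended call site for `parseIncomingJsonObject` pairs naturally with the `RealtimeClientConnection` contract from `types.ts`: parse the raw payload, then broadcast only over still-open sockets. A hedged sketch follows; `connectedClients` and the echoed event shape are assumptions, not code from this PR.

    import { WebSocket } from 'ws';
    import { parseIncomingJsonObject } from '@/shared/utils.js';
    import type { RealtimeClientConnection } from '@/shared/types.js';

    declare const connectedClients: Set<RealtimeClientConnection>;

    function handleSocketMessage(rawPayload: unknown): void {
      const message = parseIncomingJsonObject(rawPayload);
      if (!message || typeof message.type !== 'string') {
        return; // malformed payloads are dropped instead of throwing
      }

      const serialized = JSON.stringify({ type: message.type, timestamp: new Date().toISOString() });
      for (const client of connectedClients) {
        // readyState lets broadcasters skip closed sockets; send() takes the
        // JSON string, exactly the two members the contract requires.
        if (client.readyState === WebSocket.OPEN) {
          client.send(serialized);
        }
      }
    }
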
@@ -189,5 +503,167 @@ export const writeJsonConfig = async (filePath: string, data: Record<string, unknown>
+export async function findFilesRecursivelyCreatedAfter(
+  rootDir: string,
+  extension: string,
+  lastScanAt: Date | null,
+  fileList: string[] = [],
+): Promise<string[]> {
+  try {
+    const entries = await readdir(rootDir, { withFileTypes: true });
+    for (const entry of entries) {
+      const fullPath = path.join(rootDir, entry.name);
+
+      if (entry.isDirectory()) {
+        await findFilesRecursivelyCreatedAfter(fullPath, extension, lastScanAt, fileList);
+        continue;
+      }
+
+      if (!entry.isFile() || !entry.name.endsWith(extension)) {
+        continue;
+      }
+
+      if (!lastScanAt) {
+        fileList.push(fullPath);
+        continue;
+      }
+
+      const fileStat = await stat(fullPath);
+      if (fileStat.birthtime > lastScanAt) {
+        fileList.push(fullPath);
+      }
+    }
+  } catch {
+    // Missing provider folders are expected in first-run or partial setups.
+  }
+
+  return fileList;
+}
+
+/**
+ * Reads file creation/update timestamps and maps them to DB-friendly ISO strings.
+ *
+ * Session indexers use this to persist `created_at` and `updated_at` metadata
+ * when upserting sessions. If the file cannot be read, an empty object is
+ * returned so indexing can continue for other files.
+ */
+export async function readFileTimestamps(
+  filePath: string
+): Promise<{ createdAt?: string; updatedAt?: string }> {
+  try {
+    const fileStat = await stat(filePath);
+    return {
+      createdAt: fileStat.birthtime.toISOString(),
+      updatedAt: fileStat.mtime.toISOString(),
+    };
+  } catch {
+    return {};
+  }
+}
+
+// ---------------------------
+//----------------- SESSION SYNCHRONIZER JSONL PARSING HELPERS ------------
+/**
+ * Builds a first-seen key/value lookup map from a JSONL file.
+ *
+ * Use this for provider index files where session id -> display name metadata
+ * is stored line-by-line. The first value for each key wins, preserving the
+ * earliest known label while avoiding repeated map overwrites.
+ */
+export async function buildLookupMap(
+  filePath: string,
+  keyField: string,
+  valueField: string
+): Promise<Map<string, string>> {
+  const lookup = new Map<string, string>();
+
+  try {
+    const fileStream = fs.createReadStream(filePath);
+    const lineReader = readline.createInterface({ input: fileStream, crlfDelay: Infinity });
+
+    for await (const line of lineReader) {
+      const trimmed = line.trim();
+      if (!trimmed) {
+        continue;
+      }
+
+      const parsed = JSON.parse(trimmed) as Record<string, unknown>;
+      const key = parsed[keyField];
+      const value = parsed[valueField];
+
+      if (typeof key === 'string' && typeof value === 'string' && !lookup.has(key)) {
+        lookup.set(key, value);
+      }
+    }
+  } catch {
+    // Missing or unreadable lookup files should not block session sync.
+  }
+
+  return lookup;
+}
+
+/**
+ * Reads a JSONL file and returns the first extracted payload that matches caller criteria.
+ *
+ * The caller supplies an `extractor` that validates provider-specific row
+ * shapes. This helper centralizes line-by-line parsing and lets indexers stop
+ * scanning as soon as one valid row is found.
+ */
+export async function extractFirstValidJsonlData<T>(
+  filePath: string,
+  extractor: (parsedJson: unknown) => T | null | undefined
+): Promise<T | null> {
+  try {
+    const fileStream = fs.createReadStream(filePath);
+    const lineReader = readline.createInterface({ input: fileStream, crlfDelay: Infinity });

+    for await (const line of lineReader) {
+      const trimmed = line.trim();
+      if (!trimmed) {
+        continue;
+      }
+
+      const parsed = JSON.parse(trimmed);
+      const extracted = extractor(parsed);
+      if (extracted) {
+        lineReader.close();
+        fileStream.close();
+        return extracted;
+      }
+    }
+  } catch {
+    // Ignore malformed or missing artifacts so full scans keep progressing.
+  }
+
+  return null;
+}
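
Read together, these scanning and JSONL helpers compose into the incremental pass a session synchronizer runs. A sketch under assumed names (`sessionsDir`, the `index.jsonl` layout, the extractor's row shape, and `sessionsDb.upsertSession` are all illustrative):

    import {
      buildLookupMap,
      extractFirstValidJsonlData,
      findFilesRecursivelyCreatedAfter,
      readFileTimestamps,
    } from '@/shared/utils.js';

    declare const sessionsDir: string;
    declare const sessionsDb: { upsertSession(row: Record<string, unknown>): void };

    async function incrementalSync(lastScanAt: Date | null): Promise<void> {
      // Display names live in a hypothetical index.jsonl keyed by sessionId.
      const names = await buildLookupMap(`${sessionsDir}/index.jsonl`, 'sessionId', 'displayName');

      // Only artifacts created since the last scan are parsed again.
      const files = await findFilesRecursivelyCreatedAfter(sessionsDir, '.jsonl', lastScanAt);
      for (const filePath of files) {
        // Stop at the first row that carries a session id.
        const row = await extractFirstValidJsonlData(filePath, (parsed) =>
          parsed && typeof parsed === 'object' && 'sessionId' in parsed
            ? (parsed as { sessionId: string })
            : null,
        );
        if (!row) continue;

        const { createdAt, updatedAt } = await readFileTimestamps(filePath);
        sessionsDb.upsertSession({
          session_id: row.sessionId,
          name: names.get(row.sessionId) ?? null,
          created_at: createdAt,
          updated_at: updatedAt,
        });
      }
    }
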
diff --git a/server/utils/taskmaster-websocket.js b/server/utils/taskmaster-websocket.js
index 87c05498..001b3ecc 100644
--- a/server/utils/taskmaster-websocket.js
+++ b/server/utils/taskmaster-websocket.js
@@ -7,20 +7,25 @@
  */

 /**
- * Broadcast TaskMaster project update to all connected clients
+ * Broadcast TaskMaster project update to all connected clients.
+ *
+ * The payload key is `projectId` post-migration so frontend listeners can
+ * match notifications against the DB-assigned project identifier they
+ * already use everywhere else.
+ *
  * @param {WebSocket.Server} wss - WebSocket server instance
- * @param {string} projectName - Name of the updated project
+ * @param {string} projectId - DB id of the updated project
  * @param {Object} taskMasterData - Updated TaskMaster data
  */
-export function broadcastTaskMasterProjectUpdate(wss, projectName, taskMasterData) {
-  if (!wss || !projectName) {
-    console.warn('TaskMaster WebSocket broadcast: Missing wss or projectName');
+export function broadcastTaskMasterProjectUpdate(wss, projectId, taskMasterData) {
+  if (!wss || !projectId) {
+    console.warn('TaskMaster WebSocket broadcast: Missing wss or projectId');
     return;
   }

   const message = {
     type: 'taskmaster-project-updated',
-    projectName,
+    projectId,
     taskMasterData,
     timestamp: new Date().toISOString()
   };
@@ -38,20 +43,21 @@
 }

 /**
- * Broadcast TaskMaster tasks update for a specific project
- * @param {WebSocket.Server} wss - WebSocket server instance
- * @param {string} projectName - Name of the project with updated tasks
+ * Broadcast TaskMaster tasks update for a specific project.
+ *
+ * @param {WebSocket.Server} wss - WebSocket server instance
+ * @param {string} projectId - DB id of the project with updated tasks
  * @param {Object} tasksData - Updated tasks data
  */
-export function broadcastTaskMasterTasksUpdate(wss, projectName, tasksData) {
-  if (!wss || !projectName) {
-    console.warn('TaskMaster WebSocket broadcast: Missing wss or projectName');
+export function broadcastTaskMasterTasksUpdate(wss, projectId, tasksData) {
+  if (!wss || !projectId) {
+    console.warn('TaskMaster WebSocket broadcast: Missing wss or projectId');
     return;
   }

   const message = {
     type: 'taskmaster-tasks-updated',
-    projectName,
+    projectId,
     tasksData,
     timestamp: new Date().toISOString()
   };
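
On the receiving side, a frontend listener can now match these broadcasts against the selected project's DB id. A hedged sketch of a consumer (the socket wiring, `selectedProjectId`, and `refreshTasks` are assumed; only the payload keys come from the functions above):

    // Payload shape emitted by the broadcast helpers above.
    type TaskMasterBroadcast = {
      type: 'taskmaster-project-updated' | 'taskmaster-tasks-updated';
      projectId: string;
      timestamp: string;
      taskMasterData?: unknown;
      tasksData?: unknown;
    };

    declare const socket: WebSocket;
    declare const selectedProjectId: string;
    declare function refreshTasks(): void;

    socket.addEventListener('message', (event) => {
      const payload = JSON.parse(event.data) as TaskMasterBroadcast;
      if (payload.type === 'taskmaster-tasks-updated' && payload.projectId === selectedProjectId) {
        refreshTasks(); // only react to updates for the project on screen
      }
    });
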
diff --git a/src/components/chat/hooks/useChatComposerState.ts b/src/components/chat/hooks/useChatComposerState.ts
index 3b167215..c53cd01d 100644
--- a/src/components/chat/hooks/useChatComposerState.ts
+++ b/src/components/chat/hooks/useChatComposerState.ts
@@ -135,7 +135,9 @@ }: UseChatComposerStateArgs) {
   const [input, setInput] = useState(() => {
     if (typeof window !== 'undefined' && selectedProject) {
-      return safeLocalStorage.getItem(`draft_input_${selectedProject.name}`) || '';
+      // Draft inputs are keyed by the DB projectId so per-project drafts
+      // survive display-name changes.
+      return safeLocalStorage.getItem(`draft_input_${selectedProject.projectId}`) || '';
     }
     return '';
   });
@@ -276,9 +278,11 @@
     const args = commandMatch && commandMatch[1] ? commandMatch[1].trim().split(/\s+/) : [];

+    // The `/api/commands/execute` context sends `projectId` now instead of
+    // a folder-derived project name; the path is still included verbatim.
     const context = {
       projectPath: selectedProject.fullPath || selectedProject.path,
-      projectName: selectedProject.name,
+      projectId: selectedProject.projectId,
       sessionId: currentSessionId,
       provider,
       model: provider === 'cursor' ? cursorModel : provider === 'codex' ? codexModel : provider === 'gemini' ? geminiModel : claudeModel,
@@ -503,7 +507,7 @@
     });

     try {
-      const response = await authenticatedFetch(`/api/projects/${selectedProject.name}/upload-images`, {
+      const response = await authenticatedFetch(`/api/projects/${selectedProject.projectId}/upload-images`, {
         method: 'POST',
         headers: {},
         body: formData,
@@ -669,7 +673,7 @@
       textareaRef.current.style.height = 'auto';
     }

-    safeLocalStorage.removeItem(`draft_input_${selectedProject.name}`);
+    safeLocalStorage.removeItem(`draft_input_${selectedProject.projectId}`);
   }, [
     selectedSession,
@@ -712,22 +716,22 @@
     if (!selectedProject) {
       return;
     }
-    const savedInput = safeLocalStorage.getItem(`draft_input_${selectedProject.name}`) || '';
+    const savedInput = safeLocalStorage.getItem(`draft_input_${selectedProject.projectId}`) || '';
     setInput((previous) => {
       const next = previous === savedInput ? previous : savedInput;
       inputValueRef.current = next;
       return next;
     });
-  }, [selectedProject?.name]);
+  }, [selectedProject?.projectId]);

   useEffect(() => {
     if (!selectedProject) {
       return;
     }
     if (input !== '') {
-      safeLocalStorage.setItem(`draft_input_${selectedProject.name}`, input);
+      safeLocalStorage.setItem(`draft_input_${selectedProject.projectId}`, input);
     } else {
-      safeLocalStorage.removeItem(`draft_input_${selectedProject.name}`);
+      safeLocalStorage.removeItem(`draft_input_${selectedProject.projectId}`);
     }
   }, [input, selectedProject]);
diff --git a/src/components/chat/hooks/useChatSessionState.ts b/src/components/chat/hooks/useChatSessionState.ts
index b551060a..3ad66f82 100644
--- a/src/components/chat/hooks/useChatSessionState.ts
+++ b/src/components/chat/hooks/useChatSessionState.ts
@@ -241,7 +241,8 @@
     try {
       const slot = await sessionStore.fetchMore(selectedSession.id, {
         provider: sessionProvider as LLMProvider,
-        projectName: selectedProject.name,
+        // DB-assigned projectId replaces the legacy folder-derived name.
+        projectId: selectedProject.projectId,
         projectPath: selectedProject.fullPath || selectedProject.path || '',
         limit: MESSAGES_PER_PAGE,
       });
@@ -296,7 +297,7 @@
     topLoadLockRef.current = false;
     pendingScrollRestoreRef.current = null;
     setIsUserScrolledUp(false);
-  }, [selectedProject?.name, selectedSession?.id]);
+  }, [selectedProject?.projectId, selectedSession?.id]);

   // Initial scroll to bottom
   useEffect(() => {
@@ -325,7 +326,7 @@
     }

     const provider = (selectedSession.__provider || localStorage.getItem('selected-provider') as Provider) || 'claude';
-    const sessionKey = `${selectedSession.id}:${selectedProject.name}:${provider}`;
+    const sessionKey = `${selectedSession.id}:${selectedProject.projectId}:${provider}`;

     // Skip if already loaded and fresh
     if (lastLoadedSessionKeyRef.current === sessionKey && sessionStore.has(selectedSession.id) && !sessionStore.isStale(selectedSession.id)) {
@@ -375,7 +376,7 @@
       setIsLoadingSessionMessages(true);
       sessionStore.fetchFromServer(selectedSession.id, {
         provider: (selectedSession.__provider || provider) as LLMProvider,
-        projectName: selectedProject.name,
+        projectId: selectedProject.projectId,
         projectPath: selectedProject.fullPath || selectedProject.path || '',
         limit: MESSAGES_PER_PAGE,
         offset: 0,
@@ -411,7 +412,7 @@
       if (!isLoading) {
         await sessionStore.refreshFromServer(selectedSession.id, {
           provider: (selectedSession.__provider || provider) as LLMProvider,
-          projectName: selectedProject.name,
+          projectId: selectedProject.projectId,
           projectPath: selectedProject.fullPath || selectedProject.path || '',
         });

@@ -469,7 +470,7 @@
       // Load all messages into the store for search navigation
       const slot = await sessionStore.fetchFromServer(selectedSession.id, {
         provider: sessionProvider as LLMProvider,
-        projectName: selectedProject.name,
+        projectId: selectedProject.projectId,
         projectPath: selectedProject.fullPath || selectedProject.path || '',
         limit: null,
         offset: 0,
@@ -550,7 +551,8 @@
     const fetchInitialTokenUsage = async () => {
       try {
-        const url = `/api/projects/${selectedProject.name}/sessions/${selectedSession.id}/token-usage`;
+        // Token usage endpoint is now keyed by the DB projectId.
+        const url = `/api/projects/${selectedProject.projectId}/sessions/${selectedSession.id}/token-usage`;
         const response = await authenticatedFetch(url);
         if (response.ok) {
           setTokenBudget(await response.json());
@@ -656,7 +658,7 @@
     try {
       const slot = await sessionStore.fetchFromServer(requestSessionId, {
         provider: sessionProvider as LLMProvider,
-        projectName: selectedProject.name,
+        projectId: selectedProject.projectId,
         projectPath: selectedProject.fullPath || selectedProject.path || '',
         limit: null,
         offset: 0,
diff --git a/src/components/chat/hooks/useFileMentions.tsx b/src/components/chat/hooks/useFileMentions.tsx
index c53f4c7b..4061605d 100644
--- a/src/components/chat/hooks/useFileMentions.tsx
+++ b/src/components/chat/hooks/useFileMentions.tsx
@@ -59,16 +59,18 @@ export function useFileMentions({ selectedProject, input, setInput, textareaRef
     const abortController = new AbortController();

     const fetchProjectFiles = async () => {
-      const projectName = selectedProject?.name;
+      // File list is keyed by DB projectId now; the backend resolves it to
+      // the project's path before reading.
+      const projectId = selectedProject?.projectId;

       setFileList([]);
       setFilteredFiles([]);

-      if (!projectName) {
+      if (!projectId) {
         return;
       }

       try {
-        const response = await api.getFiles(projectName, { signal: abortController.signal });
+        const response = await api.getFiles(projectId, { signal: abortController.signal });
         if (!response.ok) {
           return;
         }
@@ -88,7 +90,7 @@
     return () => {
       abortController.abort();
     };
-  }, [selectedProject?.name]);
+  }, [selectedProject?.projectId]);

   useEffect(() => {
     const textBeforeCursor = input.slice(0, cursorPosition);
diff --git a/src/components/chat/hooks/useSlashCommands.ts b/src/components/chat/hooks/useSlashCommands.ts
index 067cd24d..89408420 100644
--- a/src/components/chat/hooks/useSlashCommands.ts
+++ b/src/components/chat/hooks/useSlashCommands.ts
@@ -114,7 +114,7 @@ export function useSlashCommands({
       })),
     ];

-    const parsedHistory = readCommandHistory(selectedProject.name);
+    const parsedHistory = readCommandHistory(selectedProject.projectId);
     const sortedCommands = [...allCommands].sort((commandA, commandB) => {
       const commandAUsage = parsedHistory[commandA.name] || 0;
       const commandBUsage = parsedHistory[commandB.name] || 0;
@@ -173,7 +173,7 @@
       return [];
     }

-    const parsedHistory = readCommandHistory(selectedProject.name);
+    const parsedHistory = readCommandHistory(selectedProject.projectId);

     return slashCommands
       .map((command) => ({
@@ -191,9 +191,9 @@
         return;
       }

-      const parsedHistory = readCommandHistory(selectedProject.name);
+      const parsedHistory = readCommandHistory(selectedProject.projectId);
       parsedHistory[command.name] = (parsedHistory[command.name] || 0) + 1;
-      saveCommandHistory(selectedProject.name, parsedHistory);
+      saveCommandHistory(selectedProject.projectId, parsedHistory);
     },
     [selectedProject],
   );
diff --git a/src/components/chat/view/ChatInterface.tsx b/src/components/chat/view/ChatInterface.tsx
index 1b3ae95c..2e923d7a 100644
--- a/src/components/chat/view/ChatInterface.tsx
+++ b/src/components/chat/view/ChatInterface.tsx
@@ -212,7 +212,8 @@ function ChatInterface({
       const providerVal = (localStorage.getItem('selected-provider') as LLMProvider) || 'claude';
       await sessionStore.refreshFromServer(selectedSession.id, {
         provider: (selectedSession.__provider || providerVal) as LLMProvider,
-        projectName: selectedProject.name,
+        // Use DB projectId; legacy folder-derived projectName is no longer accepted here.
+        projectId: selectedProject.projectId,
         projectPath: selectedProject.fullPath || selectedProject.path || '',
       });
       setIsLoading(false);
diff --git a/src/components/code-editor/hooks/useCodeEditorDocument.ts b/src/components/code-editor/hooks/useCodeEditorDocument.ts
index 5e3adc3e..b2b7acd2 100644
--- a/src/components/code-editor/hooks/useCodeEditorDocument.ts
+++ b/src/components/code-editor/hooks/useCodeEditorDocument.ts
@@ -23,7 +23,10 @@ export const useCodeEditorDocument = ({ file, projectPath }: UseCodeEditorDocume
   const [saveSuccess, setSaveSuccess] = useState(false);
   const [saveError, setSaveError] = useState<string | null>(null);
   const [isBinary, setIsBinary] = useState(false);
-  const fileProjectName = file.projectName ?? projectPath;
+  // `fileProjectId` is the DB primary key passed down from the editor sidebar;
+  // the fallback to `projectPath` preserves older callers that didn't yet
+  // propagate the identifier.
+  const fileProjectId = file.projectId ?? projectPath;
   const filePath = file.path;
   const fileName = file.name;
   const fileDiffNewString = file.diffInfo?.new_string;
@@ -49,11 +52,11 @@
         return;
       }

-      if (!fileProjectName) {
+      if (!fileProjectId) {
         throw new Error('Missing project identifier');
       }

-      const response = await api.readFile(fileProjectName, filePath);
+      const response = await api.readFile(fileProjectId, filePath);
       if (!response.ok) {
         throw new Error(`Failed to load file: ${response.status} ${response.statusText}`);
       }
@@ -70,18 +73,18 @@
     };

     loadFileContent();
-  }, [file.diffInfo, file.name, fileDiffNewString, fileDiffOldString, fileName, filePath, fileProjectName]);
+  }, [file.diffInfo, file.name, fileDiffNewString, fileDiffOldString, fileName, filePath, fileProjectId]);

   const handleSave = useCallback(async () => {
     setSaving(true);
     setSaveError(null);

     try {
-      if (!fileProjectName) {
+      if (!fileProjectId) {
         throw new Error('Missing project identifier');
       }

-      const response = await api.saveFile(fileProjectName, filePath, content);
+      const response = await api.saveFile(fileProjectId, filePath, content);

       if (!response.ok) {
         const contentType = response.headers.get('content-type');
@@ -106,7 +109,7 @@
     } finally {
       setSaving(false);
     }
-  }, [content, filePath, fileProjectName]);
+  }, [content, filePath, fileProjectId]);

   const handleDownload = useCallback(() => {
     const blob = new Blob([content], { type: 'text/plain' });
diff --git a/src/components/code-editor/hooks/useEditorSidebar.ts b/src/components/code-editor/hooks/useEditorSidebar.ts
index d5a650b4..87e4303d 100644
--- a/src/components/code-editor/hooks/useEditorSidebar.ts
+++ b/src/components/code-editor/hooks/useEditorSidebar.ts
@@ -29,11 +29,13 @@
       setEditingFile({
         name: fileName,
         path: filePath,
-        projectName: selectedProject?.name,
+        // DB projectId is forwarded to the editor so it can read/save files
+        // via `/api/projects/:projectId/file` endpoints.
+        projectId: selectedProject?.projectId,
         diffInfo,
       });
     },
-    [selectedProject?.name],
+    [selectedProject?.projectId],
   );

   const handleCloseEditor = useCallback(() => {
diff --git a/src/components/code-editor/types/types.ts b/src/components/code-editor/types/types.ts
index 8427a5e0..799868c5 100644
--- a/src/components/code-editor/types/types.ts
+++ b/src/components/code-editor/types/types.ts
@@ -7,7 +7,9 @@
 export type CodeEditorFile = {
   name: string;
   path: string;
-  projectName?: string;
+  // DB projectId; used by the editor to build `/api/projects/:projectId/file`
+  // URLs for reading and saving content.
+  projectId?: string;
   diffInfo?: CodeEditorDiffInfo | null;
   [key: string]: unknown;
 };
diff --git a/src/components/file-tree/hooks/useFileTreeData.ts b/src/components/file-tree/hooks/useFileTreeData.ts
index 2ac88162..0a7a9b86 100644
--- a/src/components/file-tree/hooks/useFileTreeData.ts
+++ b/src/components/file-tree/hooks/useFileTreeData.ts
@@ -20,9 +20,11 @@ export function useFileTreeData(selectedProject: Project | null): UseFileTreeDat
   }, []);

   useEffect(() => {
-    const projectName = selectedProject?.name;
+    // File-tree requests use the DB projectId; the backend resolves it to the
+    // project's absolute path through the projects table.
+    const projectId = selectedProject?.projectId;

-    if (!projectName) {
+    if (!projectId) {
       setFiles([]);
       setLoading(false);
       return;
     }
@@ -42,7 +44,7 @@
         setLoading(true);
       }
       try {
-        const response = await api.getFiles(projectName, { signal: abortControllerRef.current!.signal });
+        const response = await api.getFiles(projectId, { signal: abortControllerRef.current!.signal });

         if (!response.ok) {
           const errorText = await response.text();
@@ -79,7 +81,7 @@
       isActive = false;
       abortControllerRef.current?.abort();
     };
-  }, [selectedProject?.name, refreshKey]);
+  }, [selectedProject?.projectId, refreshKey]);

   return {
     files,
diff --git a/src/components/file-tree/hooks/useFileTreeOperations.ts b/src/components/file-tree/hooks/useFileTreeOperations.ts
index 398fcbe5..559654c6 100644
--- a/src/components/file-tree/hooks/useFileTreeOperations.ts
+++ b/src/components/file-tree/hooks/useFileTreeOperations.ts
@@ -126,7 +126,7 @@
     setOperationLoading(true);

     try {
-      const response = await api.renameFile(selectedProject.name, {
+      const response = await api.renameFile(selectedProject.projectId, {
         oldPath: renamingItem.path,
         newName: renameValue,
       });
@@ -161,7 +161,7 @@
     setOperationLoading(true);

     try {
-      const response = await api.deleteFile(selectedProject.name, {
+      const response = await api.deleteFile(selectedProject.projectId, {
         path: item.path,
         type: item.type,
       });
@@ -212,7 +212,7 @@
     setOperationLoading(true);

     try {
-      const response = await api.createFile(selectedProject.name, {
+      const response = await api.createFile(selectedProject.projectId, {
         path: newItemParent,
         type: newItemType,
         name: newItemName,
@@ -287,7 +287,7 @@
     if (!selectedProject) return;

     // Use the binary streaming endpoint so downloads preserve raw bytes.
-    const response = await api.readFileBlob(selectedProject.name, item.path);
+    const response = await api.readFileBlob(selectedProject.projectId, item.path);

     if (!response.ok) {
       throw new Error('Failed to download file');
@@ -308,7 +308,7 @@
       const fullPath = currentPath ? `${currentPath}/${node.name}` : node.name;

       if (node.type === 'file') {
-        const response = await api.readFileBlob(selectedProject.name, node.path);
+        const response = await api.readFileBlob(selectedProject.projectId, node.path);
         if (!response.ok) {
           throw new Error(`Failed to download "${node.name}" for ZIP export`);
         }
diff --git a/src/components/file-tree/hooks/useFileTreeUpload.ts b/src/components/file-tree/hooks/useFileTreeUpload.ts
index c512091a..6879e3ae 100644
--- a/src/components/file-tree/hooks/useFileTreeUpload.ts
+++ b/src/components/file-tree/hooks/useFileTreeUpload.ts
@@ -154,7 +154,8 @@
       formData.append('relativePaths', JSON.stringify(relativePaths));

       const response = await api.post(
-        `/projects/${encodeURIComponent(selectedProject!.name)}/files/upload`,
+        // File upload endpoint is keyed by DB projectId post-migration.
+        `/projects/${encodeURIComponent(selectedProject!.projectId)}/files/upload`,
         formData
       );
diff --git a/src/components/file-tree/types/types.ts b/src/components/file-tree/types/types.ts
index fb2ac842..1cdb8194 100644
--- a/src/components/file-tree/types/types.ts
+++ b/src/components/file-tree/types/types.ts
@@ -19,7 +19,8 @@ export interface FileTreeImageSelection {
   name: string;
   path: string;
   projectPath?: string;
-  projectName: string;
+  // DB projectId; used by ImageViewer to build the raw content URL.
+  projectId: string;
 }

 export interface FileIconData {
diff --git a/src/components/file-tree/view/FileTree.tsx b/src/components/file-tree/view/FileTree.tsx
index e847613c..b42e1014 100644
--- a/src/components/file-tree/view/FileTree.tsx
+++ b/src/components/file-tree/view/FileTree.tsx
@@ -101,7 +101,9 @@ export default function FileTree({ selectedProject, onFileOpen }: FileTreeProps)
           name: item.name,
           path: item.path,
           projectPath: selectedProject.path,
-          projectName: selectedProject.name,
+          // Image URL uses the DB projectId so ImageViewer can hit the
+          // /api/projects/:projectId/files/content endpoint directly.
+          projectId: selectedProject.projectId,
         });
         return;
       }
diff --git a/src/components/file-tree/view/ImageViewer.tsx b/src/components/file-tree/view/ImageViewer.tsx
index 0d151090..771b1f01 100644
--- a/src/components/file-tree/view/ImageViewer.tsx
+++ b/src/components/file-tree/view/ImageViewer.tsx
@@ -10,7 +10,7 @@
 };

 export default function ImageViewer({ file, onClose }: ImageViewerProps) {
-  const imagePath = `/api/projects/${file.projectName}/files/content?path=${encodeURIComponent(file.path)}`;
+  const imagePath = `/api/projects/${file.projectId}/files/content?path=${encodeURIComponent(file.path)}`;
   const [imageUrl, setImageUrl] = useState<string | null>(null);
   const [error, setError] = useState<string | null>(null);
   const [loading, setLoading] = useState(true);
diff --git a/src/components/git-panel/hooks/useGitPanelController.ts b/src/components/git-panel/hooks/useGitPanelController.ts
index cb34cf13..4ef54002 100644
--- a/src/components/git-panel/hooks/useGitPanelController.ts
+++ b/src/components/git-panel/hooks/useGitPanelController.ts
@@ -64,10 +64,12 @@
   const [operationError, setOperationError] = useState<string | null>(null);
   const clearOperationError = useCallback(() => setOperationError(null), []);

-  const selectedProjectNameRef = useRef(selectedProject?.name ?? null);
+  // Tracks the DB projectId so async requests can detect stale responses when
+  // the user switches projects mid-flight.
+  const selectedProjectIdRef = useRef(selectedProject?.projectId ?? null);

   useEffect(() => {
-    selectedProjectNameRef.current = selectedProject?.name ?? null;
+    selectedProjectIdRef.current = selectedProject?.projectId ?? null;
   }, [selectedProject]);

   const provider = useSelectedProvider();
@@ -78,18 +80,19 @@
       return;
     }

-    const projectName = selectedProject.name;
+    // Git endpoints receive the DB projectId via the `project` query param.
+    const projectId = selectedProject.projectId;

     try {
      const response = await fetchWithAuth(
-        `/api/git/diff?project=${encodeURIComponent(projectName)}&file=${encodeURIComponent(filePath)}`,
+        `/api/git/diff?project=${encodeURIComponent(projectId)}&file=${encodeURIComponent(filePath)}`,
         { signal },
       );
       const data = await readJson(response, signal);

       if (
         signal?.aborted ||
-        selectedProjectNameRef.current !== projectName
+        selectedProjectIdRef.current !== projectId
       ) {
         return;
       }
@@ -116,16 +119,17 @@
       return;
     }

-    const projectName = selectedProject.name;
+    // `project` query param carries the DB projectId everywhere now.
+    const projectId = selectedProject.projectId;

     setIsLoading(true);

     try {
-      const response = await fetchWithAuth(`/api/git/status?project=${encodeURIComponent(projectName)}`, { signal });
+      const response = await fetchWithAuth(`/api/git/status?project=${encodeURIComponent(projectId)}`, { signal });
       const data = await readJson(response, signal);

       if (
         signal?.aborted ||
-        selectedProjectNameRef.current !== projectName
+        selectedProjectIdRef.current !== projectId
       ) {
         return;
       }
@@ -150,7 +154,7 @@
       }

       if (
-        selectedProjectNameRef.current !== projectName
+        selectedProjectIdRef.current !== projectId
       ) {
         return;
       }
@@ -169,7 +173,7 @@
     }

     try {
-      const response = await fetchWithAuth(`/api/git/branches?project=${encodeURIComponent(selectedProject.name)}`);
+      const response = await fetchWithAuth(`/api/git/branches?project=${encodeURIComponent(selectedProject.projectId)}`);
       const data = await readJson(response);

       if (!data.error && data.branches) {
@@ -196,7 +200,7 @@
     }

     try {
-      const response = await fetchWithAuth(`/api/git/remote-status?project=${encodeURIComponent(selectedProject.name)}`);
+      const response = await fetchWithAuth(`/api/git/remote-status?project=${encodeURIComponent(selectedProject.projectId)}`);
       const data = await readJson(response);

       if (!data.error) {
@@ -222,7 +226,7 @@
       method: 'POST',
       headers: { 'Content-Type': 'application/json' },
       body: JSON.stringify({
-        project: selectedProject.name,
+        project: selectedProject.projectId,
         branch: branchName,
       }),
     });
@@ -257,7 +261,7 @@
       method: 'POST',
       headers: { 'Content-Type': 'application/json' },
       body: JSON.stringify({
-        project: selectedProject.name,
+        project: selectedProject.projectId,
         branch: trimmedBranchName,
       }),
     });
@@ -290,7 +294,7 @@
     const response = await fetchWithAuth('/api/git/delete-branch', {
       method: 'POST',
       headers: { 'Content-Type': 'application/json' },
-      body: JSON.stringify({ project: selectedProject.name, branch: branchName }),
+      body: JSON.stringify({ project: selectedProject.projectId, branch: branchName }),
     });

     const data = await readJson(response);
@@ -320,7 +324,7 @@
       method: 'POST',
       headers: { 'Content-Type': 'application/json' },
       body: JSON.stringify({
-        project: selectedProject.name,
+        project: selectedProject.projectId,
       }),
     });
@@ -351,7 +355,7 @@
       method: 'POST',
       headers: { 'Content-Type': 'application/json' },
       body: JSON.stringify({
-        project: selectedProject.name,
+        project: selectedProject.projectId,
       }),
     });
@@ -381,7 +385,7 @@
       method: 'POST',
       headers: { 'Content-Type': 'application/json' },
       body: JSON.stringify({
-        project: selectedProject.name,
+        project: selectedProject.projectId,
       }),
     });
@@ -411,7 +415,7 @@
       method: 'POST',
       headers: { 'Content-Type': 'application/json' },
       body: JSON.stringify({
-        project: selectedProject.name,
+        project: selectedProject.projectId,
         branch: currentBranch,
       }),
     });
@@ -442,7 +446,7 @@
       method: 'POST',
       headers: { 'Content-Type': 'application/json' },
       body: JSON.stringify({
-        project: selectedProject.name,
+        project: selectedProject.projectId,
         file: filePath,
       }),
     });
@@ -472,7 +476,7 @@
       method: 'POST',
       headers: { 'Content-Type': 'application/json' },
       body: JSON.stringify({
-        project: selectedProject.name,
+        project: selectedProject.projectId,
         file: filePath,
       }),
     });
@@ -498,7 +502,7 @@

     try {
       const response = await fetchWithAuth(
-        `/api/git/commits?project=${encodeURIComponent(selectedProject.name)}&limit=${RECENT_COMMITS_LIMIT}`,
+        `/api/git/commits?project=${encodeURIComponent(selectedProject.projectId)}&limit=${RECENT_COMMITS_LIMIT}`,
       );
       const data = await readJson(response);
@@ -518,7 +522,7 @@

     try {
       const response = await fetchWithAuth(
-        `/api/git/commit-diff?project=${encodeURIComponent(selectedProject.name)}&commit=${commitHash}`,
+        `/api/git/commit-diff?project=${encodeURIComponent(selectedProject.projectId)}&commit=${commitHash}`,
       );
       const data = await readJson(response);
@@ -546,7 +550,7 @@
       method: 'POST',
       headers: { 'Content-Type': 'application/json' },
       body: JSON.stringify({
-        project: selectedProject.name,
+        project: selectedProject.projectId,
         files,
         provider,
       }),
@@ -578,7 +582,7 @@
       method: 'POST',
       headers: { 'Content-Type': 'application/json' },
       body: JSON.stringify({
-        project: selectedProject.name,
+        project: selectedProject.projectId,
         message,
         files,
       }),
@@ -612,7 +616,7 @@
       method: 'POST',
       headers: { 'Content-Type': 'application/json' },
       body: JSON.stringify({
-        project: selectedProject.name,
+        project: selectedProject.projectId,
       }),
     });
@@ -645,7 +649,7 @@

     try {
       const response = await fetchWithAuth(
-        `/api/git/file-with-diff?project=${encodeURIComponent(selectedProject.name)}&file=${encodeURIComponent(filePath)}`,
+        `/api/git/file-with-diff?project=${encodeURIComponent(selectedProject.projectId)}&file=${encodeURIComponent(filePath)}`,
       );
       const data = await readJson(response);
diff --git a/src/components/git-panel/hooks/useRevertLocalCommit.ts b/src/components/git-panel/hooks/useRevertLocalCommit.ts
index 3c3ea918..86929528 100644
--- a/src/components/git-panel/hooks/useRevertLocalCommit.ts
+++ b/src/components/git-panel/hooks/useRevertLocalCommit.ts
@@ -3,7 +3,9 @@ import { authenticatedFetch } from '../../../utils/api';
 import type { GitOperationResponse } from '../types/types';

 type UseRevertLocalCommitOptions = {
-  projectName: string | null;
+  // DB primary key for the project; forwarded to the git API via the
+  // `project` body param.
+  projectId: string | null;
   onSuccess?: () => void;
 };

@@ -11,11 +13,11 @@ async function readJson<T>(response: Response): Promise<T> {
   return (await response.json()) as T;
 }

-export function useRevertLocalCommit({ projectName, onSuccess }: UseRevertLocalCommitOptions) {
+export function useRevertLocalCommit({ projectId, onSuccess }: UseRevertLocalCommitOptions) {
   const [isRevertingLocalCommit, setIsRevertingLocalCommit] = useState(false);

   const revertLatestLocalCommit = useCallback(async () => {
-    if (!projectName) {
+    if (!projectId) {
       return;
     }
@@ -24,7 +26,7 @@
       const response = await authenticatedFetch('/api/git/revert-local-commit', {
         method: 'POST',
         headers: { 'Content-Type': 'application/json' },
-        body: JSON.stringify({ project: projectName }),
+        body: JSON.stringify({ project: projectId }),
       });
       const data = await readJson<GitOperationResponse>(response);
@@ -39,7 +41,7 @@
     } finally {
       setIsRevertingLocalCommit(false);
     }
-  }, [onSuccess, projectName]);
+  }, [onSuccess, projectId]);

   return {
     isRevertingLocalCommit,
diff --git a/src/components/git-panel/view/GitPanel.tsx b/src/components/git-panel/view/GitPanel.tsx
index fc6438bd..de9891dd 100644
--- a/src/components/git-panel/view/GitPanel.tsx
+++ b/src/components/git-panel/view/GitPanel.tsx
@@ -58,7 +58,9 @@ export default function GitPanel({ selectedProject, isMobile = false, onFileOpen
   });

   const { isRevertingLocalCommit, revertLatestLocalCommit } = useRevertLocalCommit({
-    projectName: selectedProject?.name ?? null,
+    // `projectId` (DB primary key) is forwarded to the revert API which uses it
+    // as the `project` body param.
+    projectId: selectedProject?.projectId ?? null,
     onSuccess: refreshAll,
   });
diff --git a/src/components/main-content/view/MainContent.tsx b/src/components/main-content/view/MainContent.tsx
index 89197150..bf0b87fc 100644
--- a/src/components/main-content/view/MainContent.tsx
+++ b/src/components/main-content/view/MainContent.tsx
@@ -73,13 +73,15 @@ function MainContent({
   });

   useEffect(() => {
-    const selectedProjectName = selectedProject?.name;
-    const currentProjectName = currentProject?.name;
+    // Identify projects by DB `projectId`; the TaskMaster context uses the
+    // same identifier to key its internal maps.
+    const selectedProjectId = selectedProject?.projectId;
+    const currentProjectId = currentProject?.projectId;

-    if (selectedProject && selectedProjectName !== currentProjectName) {
+    if (selectedProject && selectedProjectId !== currentProjectId) {
       setCurrentProject?.(selectedProject);
     }
-  }, [selectedProject, currentProject?.name, setCurrentProject]);
+  }, [selectedProject, currentProject?.projectId, setCurrentProject]);

   useEffect(() => {
     if (!shouldShowTasksTab && activeTab === 'tasks') {
diff --git a/src/components/mcp/hooks/useMcpServerForm.ts b/src/components/mcp/hooks/useMcpServerForm.ts
index 52809cbe..f38ef83e 100644
--- a/src/components/mcp/hooks/useMcpServerForm.ts
+++ b/src/components/mcp/hooks/useMcpServerForm.ts
@@ -128,7 +128,8 @@
       currentProjects
         .map((project) => ({
           value: getProjectPath(project),
-          label: project.displayName || project.name,
+          // Fall back to projectId (DB primary key) when no display name is set.
+ label: project.displayName || project.projectId, })) .filter((project) => project.value) ), [currentProjects]); diff --git a/src/components/mcp/hooks/useMcpServers.ts b/src/components/mcp/hooks/useMcpServers.ts index 57ed81cc..e9cb2d3c 100644 --- a/src/components/mcp/hooks/useMcpServers.ts +++ b/src/components/mcp/hooks/useMcpServers.ts @@ -31,6 +31,8 @@ type GlobalMcpServerResponse = { results: GlobalMcpServerResult[]; }; +// Internal MCP-side shape; `name` is now filled from the DB projectId since +// the legacy Project.name field was removed during the projectId migration. type ProjectTarget = { name: string; displayName: string; @@ -111,6 +113,9 @@ const normalizeServer = ( bearerTokenEnvVar: server.bearerTokenEnvVar, envHttpHeaders: server.envHttpHeaders ?? {}, workspacePath: project?.path || server.workspacePath, + // Keep the `projectName` key in the MCP wire payload for backwards + // compatibility. ProjectTarget.name is populated from the DB `projectId` + // (see createProjectTargets) so this still carries the new identifier. projectName: project?.name || server.projectName, projectDisplayName: project?.displayName || server.projectDisplayName, }; @@ -126,8 +131,9 @@ const createProjectTargets = (projects: McpProject[]): ProjectTarget[] => { seen.add(projectPath); acc.push({ - name: project.name, - displayName: project.displayName || project.name, + // Use projectId as the stable internal identifier. + name: project.projectId, + displayName: project.displayName || project.projectId, path: projectPath, }); return acc; diff --git a/src/components/mcp/types.ts b/src/components/mcp/types.ts index 810258e9..2e3b618d 100644 --- a/src/components/mcp/types.ts +++ b/src/components/mcp/types.ts @@ -7,8 +7,10 @@ export type McpImportMode = 'form' | 'json'; export type McpFormMode = 'provider' | 'global'; export type KeyValueMap = Record; +// Internal MCP shape; `projectId` replaces the legacy `name` field from the +// projectName → projectId migration. export type McpProject = { - name: string; + projectId: string; displayName?: string; fullPath?: string; path?: string; diff --git a/src/components/plugins/view/PluginTabContent.tsx b/src/components/plugins/view/PluginTabContent.tsx index f3340738..7e7f8db5 100644 --- a/src/components/plugins/view/PluginTabContent.tsx +++ b/src/components/plugins/view/PluginTabContent.tsx @@ -12,6 +12,9 @@ type PluginTabContentProps = { type PluginContext = { theme: 'dark' | 'light'; + // Plugin contract historically used `name` for the project identifier; we + // keep that key and populate it from the DB `projectId` so external plugins + // continue to receive a stable opaque id. project: { name: string; path: string } | null; session: { id: string; title: string } | null; }; @@ -25,7 +28,7 @@ function buildContext( theme: isDarkMode ? 'dark' : 'light', project: selectedProject ? { - name: selectedProject.name, + name: selectedProject.projectId, path: selectedProject.fullPath || selectedProject.path || '', } : null, diff --git a/src/components/prd-editor/PRDEditor.tsx b/src/components/prd-editor/PRDEditor.tsx index 3ad89d63..5cf23e77 100644 --- a/src/components/prd-editor/PRDEditor.tsx +++ b/src/components/prd-editor/PRDEditor.tsx @@ -39,14 +39,16 @@ export default function PRDEditor({ projectPath, }); + // PRD hooks are now addressed by DB `projectId`; the backend resolves the + // `.taskmaster/docs` folder from the `projects` table. 
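+  // Sketch of the calls these hooks wire up (id value illustrative):
+  //   GET  /api/taskmaster/prd/a1b2c3   -> list existing PRDs
+  //   POST /api/taskmaster/prd/a1b2c3   -> save a PRD file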
const { existingPrds, refreshExistingPrds } = usePrdRegistry({ - projectName: project?.name, + projectId: project?.projectId, }); const isExistingFile = useMemo(() => !isNewFile || Boolean(file?.isExisting), [file?.isExisting, isNewFile]); const { savePrd, saving, saveSuccess } = usePrdSave({ - projectName: project?.name, + projectId: project?.projectId, existingPrds, isExistingFile, onAfterSave: async () => { diff --git a/src/components/prd-editor/hooks/usePrdDocument.ts b/src/components/prd-editor/hooks/usePrdDocument.ts index 3728caf4..9d817d16 100644 --- a/src/components/prd-editor/hooks/usePrdDocument.ts +++ b/src/components/prd-editor/hooks/usePrdDocument.ts @@ -73,7 +73,7 @@ export function usePrdDocument({ return; } - if (!file?.projectName || !file?.path) { + if (!file?.projectId || !file?.path) { if (!isMounted) { return; } @@ -87,7 +87,8 @@ export function usePrdDocument({ try { setLoading(true); - const response = await api.readFile(file.projectName, file.path); + // readFile uses the DB projectId to resolve the project's path server-side. + const response = await api.readFile(file.projectId, file.path); if (!response.ok) { throw new Error(`Failed to load file: ${response.status} ${response.statusText}`); } diff --git a/src/components/prd-editor/hooks/usePrdRegistry.ts b/src/components/prd-editor/hooks/usePrdRegistry.ts index f7a40856..f28b1665 100644 --- a/src/components/prd-editor/hooks/usePrdRegistry.ts +++ b/src/components/prd-editor/hooks/usePrdRegistry.ts @@ -3,7 +3,8 @@ import { api } from '../../../utils/api'; import type { ExistingPrdFile, PrdListResponse } from '../types'; type UsePrdRegistryArgs = { - projectName?: string; + // DB primary key of the project (post migration). + projectId?: string; }; type UsePrdRegistryResult = { @@ -15,17 +16,17 @@ function getPrdFiles(data: PrdListResponse): ExistingPrdFile[] { return data.prdFiles || data.prds || []; } -export function usePrdRegistry({ projectName }: UsePrdRegistryArgs): UsePrdRegistryResult { +export function usePrdRegistry({ projectId }: UsePrdRegistryArgs): UsePrdRegistryResult { const [existingPrds, setExistingPrds] = useState([]); const refreshExistingPrds = useCallback(async () => { - if (!projectName) { + if (!projectId) { setExistingPrds([]); return; } try { - const response = await api.get(`/taskmaster/prd/${encodeURIComponent(projectName)}`); + const response = await api.get(`/taskmaster/prd/${encodeURIComponent(projectId)}`); if (!response.ok) { setExistingPrds([]); return; @@ -37,7 +38,7 @@ export function usePrdRegistry({ projectName }: UsePrdRegistryArgs): UsePrdRegis console.error('Failed to fetch existing PRDs:', error); setExistingPrds([]); } - }, [projectName]); + }, [projectId]); useEffect(() => { void refreshExistingPrds(); diff --git a/src/components/prd-editor/hooks/usePrdSave.ts b/src/components/prd-editor/hooks/usePrdSave.ts index 1d802ad5..b216f6cb 100644 --- a/src/components/prd-editor/hooks/usePrdSave.ts +++ b/src/components/prd-editor/hooks/usePrdSave.ts @@ -4,7 +4,8 @@ import type { ExistingPrdFile, SavePrdInput, SavePrdResult } from '../types'; import { ensurePrdExtension } from '../utils/fileName'; type UsePrdSaveArgs = { - projectName?: string; + // DB primary key of the project (post migration). 
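+  // Sketch (assumed values): the hook below POSTs to
+  //   /api/taskmaster/prd/a1b2c3
+  // with a JSON body carrying at least the PRD file name.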
+ projectId?: string; existingPrds: ExistingPrdFile[]; isExistingFile: boolean; onAfterSave?: () => Promise; @@ -17,7 +18,7 @@ type UsePrdSaveResult = { }; export function usePrdSave({ - projectName, + projectId, existingPrds, isExistingFile, onAfterSave, @@ -44,7 +45,7 @@ export function usePrdSave({ return { status: 'failed', message: 'Please provide a filename for the PRD.' }; } - if (!projectName) { + if (!projectId) { return { status: 'failed', message: 'No project selected. Please reopen the editor.' }; } @@ -59,7 +60,7 @@ export function usePrdSave({ setSaving(true); try { - const response = await authenticatedFetch(`/api/taskmaster/prd/${encodeURIComponent(projectName)}`, { + const response = await authenticatedFetch(`/api/taskmaster/prd/${encodeURIComponent(projectId)}`, { method: 'POST', body: JSON.stringify({ fileName: finalFileName, @@ -100,7 +101,7 @@ export function usePrdSave({ setSaving(false); } }, - [existingPrds, isExistingFile, onAfterSave, projectName], + [existingPrds, isExistingFile, onAfterSave, projectId], ); return { diff --git a/src/components/prd-editor/types.ts b/src/components/prd-editor/types.ts index c9ddfd85..8cac8c15 100644 --- a/src/components/prd-editor/types.ts +++ b/src/components/prd-editor/types.ts @@ -1,7 +1,8 @@ export type PrdFile = { name?: string; path?: string; - projectName?: string; + // DB projectId used to resolve the project path when fetching file content. + projectId?: string; content?: string; isExisting?: boolean; }; diff --git a/src/components/project-creation-wizard/ProjectCreationWizard.tsx b/src/components/project-creation-wizard/ProjectCreationWizard.tsx index fca05c53..022b5ba3 100644 --- a/src/components/project-creation-wizard/ProjectCreationWizard.tsx +++ b/src/components/project-creation-wizard/ProjectCreationWizard.tsx @@ -4,13 +4,12 @@ import { useTranslation } from 'react-i18next'; import ErrorBanner from './components/ErrorBanner'; import StepConfiguration from './components/StepConfiguration'; import StepReview from './components/StepReview'; -import StepTypeSelection from './components/StepTypeSelection'; import WizardFooter from './components/WizardFooter'; import WizardProgress from './components/WizardProgress'; import { useGithubTokens } from './hooks/useGithubTokens'; -import { cloneWorkspaceWithProgress, createWorkspaceRequest } from './data/workspaceApi'; +import { cloneWorkspaceWithProgress, createProjectRequest } from './data/workspaceApi'; import { isCloneWorkflow, shouldShowGithubAuthentication } from './utils/pathUtils'; -import type { TokenMode, WizardFormState, WizardStep, WorkspaceType } from './types'; +import type { TokenMode, WizardFormState, WizardStep } from './types'; type ProjectCreationWizardProps = { onClose: () => void; @@ -18,7 +17,6 @@ type ProjectCreationWizardProps = { }; const initialFormState: WizardFormState = { - workspaceType: 'existing', workspacePath: '', githubUrl: '', tokenMode: 'stored', @@ -38,7 +36,7 @@ export default function ProjectCreationWizard({ const [cloneProgress, setCloneProgress] = useState(''); const shouldLoadTokens = - step === 2 && shouldShowGithubAuthentication(formState.workspaceType, formState.githubUrl); + step === 1 && shouldShowGithubAuthentication(formState.githubUrl); const autoSelectToken = useCallback((tokenId: string) => { setFormState((previous) => ({ ...previous, selectedGithubToken: tokenId })); @@ -60,11 +58,6 @@ export default function ProjectCreationWizard({ setFormState((previous) => ({ ...previous, [key]: value })); }, []); - const 
updateWorkspaceType = useCallback( - (workspaceType: WorkspaceType) => updateField('workspaceType', workspaceType), - [updateField], - ); - const updateTokenMode = useCallback( (tokenMode: TokenMode) => updateField('tokenMode', tokenMode), [updateField], @@ -74,22 +67,13 @@ export default function ProjectCreationWizard({ setError(null); if (step === 1) { - if (!formState.workspaceType) { - setError(t('projectWizard.errors.selectType')); - return; - } - setStep(2); - return; - } - - if (step === 2) { if (!formState.workspacePath.trim()) { setError(t('projectWizard.errors.providePath')); return; } - setStep(3); + setStep(2); } - }, [formState.workspacePath, formState.workspaceType, step, t]); + }, [formState.workspacePath, step, t]); const handleBack = useCallback(() => { setError(null); @@ -102,7 +86,7 @@ export default function ProjectCreationWizard({ setCloneProgress(''); try { - const shouldCloneRepository = isCloneWorkflow(formState.workspaceType, formState.githubUrl); + const shouldCloneRepository = isCloneWorkflow(formState.githubUrl); if (shouldCloneRepository) { const project = await cloneWorkspaceWithProgress( @@ -123,8 +107,7 @@ export default function ProjectCreationWizard({ return; } - const project = await createWorkspaceRequest({ - workspaceType: formState.workspaceType, + const project = await createProjectRequest({ path: formState.workspacePath.trim(), }); @@ -142,8 +125,8 @@ export default function ProjectCreationWizard({ }, [formState, onClose, onProjectCreated, t]); const shouldCloneRepository = useMemo( - () => isCloneWorkflow(formState.workspaceType, formState.githubUrl), - [formState.githubUrl, formState.workspaceType], + () => isCloneWorkflow(formState.githubUrl), + [formState.githubUrl], ); return ( @@ -173,15 +156,7 @@ export default function ProjectCreationWizard({ {error && } {step === 1 && ( - - )} - - {step === 2 && ( updateField('newGithubToken', newGithubToken) } - onAdvanceToConfirm={() => setStep(3)} + onAdvanceToConfirm={() => setStep(2)} /> )} - {step === 3 && ( + {step === 2 && (

-          {workspaceType === 'existing'
-            ? t('projectWizard.step2.existingHelp')
-            : t('projectWizard.step2.newHelp')}
+          {t('projectWizard.step2.newHelp')}

-        {workspaceType === 'new' && (
-          <>
-
-
-            <Input onChange={(event) => onGithubUrlChange(event.target.value)}
-              placeholder="https://github.com/username/repository"
-              className="w-full"
-              disabled={isCreating}
-            />

-            {t('projectWizard.step2.githubHelp')}

-
+
+
+          <Input onChange={(event) => onGithubUrlChange(event.target.value)}
+            placeholder="https://github.com/username/repository"
+            className="w-full"
+            disabled={isCreating}
+          />

+          {t('projectWizard.step2.githubHelp')}

+
-        {showGithubAuth && (
-        )}
-
+        {showGithubAuth && (
+        )}
   );
diff --git a/src/components/project-creation-wizard/components/StepReview.tsx b/src/components/project-creation-wizard/components/StepReview.tsx
index 843726a4..526dabf1 100644
--- a/src/components/project-creation-wizard/components/StepReview.tsx
+++ b/src/components/project-creation-wizard/components/StepReview.tsx
@@ -42,17 +42,6 @@
-
-
-          {t('projectWizard.step3.workspaceType')}
-
-
-          {formState.workspaceType === 'existing'
-            ? t('projectWizard.step3.existingWorkspace')
-            : t('projectWizard.step3.newWorkspace')}
-
-
-
           {t('projectWizard.step3.path')}
@@ -60,7 +49,7 @@
-        {formState.workspaceType === 'new' && formState.githubUrl && (
+        {formState.githubUrl && (
           <>
@@ -94,11 +83,9 @@
         ) : (

-          {formState.workspaceType === 'existing'
-            ? t('projectWizard.step3.existingInfo')
-            : formState.githubUrl
-              ? t('projectWizard.step3.newWithClone')
-              : t('projectWizard.step3.newEmpty')}
+          {formState.githubUrl
+            ? t('projectWizard.step3.newWithClone')
+            : t('projectWizard.step3.newEmpty')}

         )}
diff --git a/src/components/project-creation-wizard/components/StepTypeSelection.tsx b/src/components/project-creation-wizard/components/StepTypeSelection.tsx deleted file mode 100644 index efa4345e..00000000 --- a/src/components/project-creation-wizard/components/StepTypeSelection.tsx +++ /dev/null @@ -1,71 +0,0 @@ -import { FolderPlus, GitBranch } from 'lucide-react'; -import { useTranslation } from 'react-i18next'; -import type { WorkspaceType } from '../types'; - -type StepTypeSelectionProps = { - workspaceType: WorkspaceType; - onWorkspaceTypeChange: (workspaceType: WorkspaceType) => void; -}; - -export default function StepTypeSelection({ - workspaceType, - onWorkspaceTypeChange, -}: StepTypeSelectionProps) { - const { t } = useTranslation(); - - return ( -
-

- {t('projectWizard.step1.question')} -

- -
- - - -
-
- ); -} diff --git a/src/components/project-creation-wizard/components/WizardFooter.tsx b/src/components/project-creation-wizard/components/WizardFooter.tsx index 5fbf64ab..78cf2fec 100644 --- a/src/components/project-creation-wizard/components/WizardFooter.tsx +++ b/src/components/project-creation-wizard/components/WizardFooter.tsx @@ -37,7 +37,7 @@ export default function WizardFooter({ )} - )} - + )} ); diff --git a/src/components/sidebar/view/subcomponents/SidebarSessionItem.tsx b/src/components/sidebar/view/subcomponents/SidebarSessionItem.tsx index 507478a8..7da02cb2 100644 --- a/src/components/sidebar/view/subcomponents/SidebarSessionItem.tsx +++ b/src/components/sidebar/view/subcomponents/SidebarSessionItem.tsx @@ -1,8 +1,8 @@ -import { Check, Clock, Edit2, Trash2, X } from 'lucide-react'; +import { Check, Edit2, Trash2, X } from 'lucide-react'; import type { TFunction } from 'i18next'; + import { Badge, Button } from '../../../../shared/view/ui'; import { cn } from '../../../../lib/utils'; -import { formatTimeAgo } from '../../../../utils/dateUtils'; import type { Project, ProjectSession, LLMProvider } from '../../../../types/app'; import type { SessionWithProvider } from '../../types/types'; import { createSessionViewModel } from '../../utils/utils'; @@ -30,6 +30,34 @@ type SidebarSessionItemProps = { t: TFunction; }; +/** + * Compact relative time for sidebar rows: + * <1m, Xm, Xhr, Xd. + */ +const formatCompactSessionAge = (dateString: string, currentTime: Date): string => { + const date = new Date(dateString); + if (Number.isNaN(date.getTime())) { + return ''; + } + + const diffInMinutes = Math.floor(Math.max(0, currentTime.getTime() - date.getTime()) / (1000 * 60)); + if (diffInMinutes < 1) { + return '<1m'; + } + + if (diffInMinutes < 60) { + return `${diffInMinutes}m`; + } + + const diffInHours = Math.floor(diffInMinutes / 60); + if (diffInHours < 24) { + return `${diffInHours}hr`; + } + + const diffInDays = Math.floor(diffInHours / 24); + return `${diffInDays}d`; +}; + export default function SidebarSessionItem({ project, session, @@ -48,18 +76,21 @@ export default function SidebarSessionItem({ }: SidebarSessionItemProps) { const sessionView = createSessionViewModel(session, currentTime, t); const isSelected = selectedSession?.id === session.id; + const compactSessionAge = formatCompactSessionAge(sessionView.sessionTime, currentTime); + // Sessions are owned by a project identified by `projectId` (DB primary key) + // after the projectName → projectId migration. const selectMobileSession = () => { onProjectSelect(project); - onSessionSelect(session, project.name); + onSessionSelect(session, project.projectId); }; const saveEditedSession = () => { - onSaveEditingSession(project.name, session.id, editingSessionName, session.__provider); + onSaveEditingSession(project.projectId, session.id, editingSessionName, session.__provider); }; const requestDeleteSession = () => { - onDeleteSession(project.name, session.id, sessionView.sessionName, session.__provider); + onDeleteSession(project.projectId, session.id, sessionView.sessionName, session.__provider); }; return ( @@ -92,20 +123,18 @@ export default function SidebarSessionItem({
-
            {sessionView.sessionName}
-
-
-            {formatTimeAgo(sessionView.sessionTime, currentTime, t)}
-
+
            {sessionView.sessionName}
+          {compactSessionAge && (
+            {compactSessionAge}
+          )}
+
+
          {sessionView.messageCount > 0 && (
-
+            {sessionView.messageCount}
          )}
-
-
-
@@ -131,28 +160,21 @@ export default function SidebarSessionItem({ 'w-full justify-start p-2 h-auto font-normal text-left hover:bg-accent/50 transition-colors duration-200', isSelected && 'bg-accent text-accent-foreground', )} - onClick={() => onSessionSelect(session, project.name)} + onClick={() => onSessionSelect(session, project.projectId)} >
-
            {sessionView.sessionName}
-
-
-            {formatTimeAgo(sessionView.sessionTime, currentTime, t)}
-            {sessionView.messageCount > 0 && (
-              {sessionView.messageCount}
-
+
+
            {sessionView.sessionName}
+          {compactSessionAge && (
+            {compactSessionAge}
+          )}
-
+
-
+
+          {sessionView.messageCount > 0 && {sessionView.messageCount}}
diff --git a/src/components/task-master/context/TaskMasterContext.tsx b/src/components/task-master/context/TaskMasterContext.tsx index 37953ad8..05afaddf 100644 --- a/src/components/task-master/context/TaskMasterContext.tsx +++ b/src/components/task-master/context/TaskMasterContext.tsx @@ -1,4 +1,5 @@ import { createContext, useCallback, useContext, useEffect, useMemo, useRef, useState } from 'react'; + import { api } from '../../../utils/api'; import { useAuth } from '../../auth/context/AuthContext'; import { useWebSocket } from '../../../contexts/WebSocketContext'; @@ -73,11 +74,19 @@ export function TaskMasterProvider({ children }: { children: React.ReactNode }) const [isLoadingMCP, setIsLoadingMCP] = useState(false); const [error, setError] = useState(null); - const currentProjectNameRef = useRef(null); + // Track the active project via DB `projectId`; everything downstream uses + // the same identifier post-migration. + const currentProjectIdRef = useRef(null); + const projectTaskMasterRef = useRef(null); + const taskMasterRequestSeqRef = useRef(0); useEffect(() => { - currentProjectNameRef.current = currentProject?.name ?? null; - }, [currentProject?.name]); + currentProjectIdRef.current = currentProject?.projectId ?? null; + }, [currentProject?.projectId]); + + useEffect(() => { + projectTaskMasterRef.current = projectTaskMaster; + }, [projectTaskMaster]); const clearError = useCallback(() => { setError(null); @@ -88,16 +97,96 @@ export function TaskMasterProvider({ children }: { children: React.ReactNode }) setError(createTaskMasterError(context, caughtError)); }, []); - const setCurrentProject = useCallback((project: TaskMasterProjectInput) => { - const normalizedProject = project ? enrichProject(project as TaskMasterProject) : null; - setCurrentProjectState(normalizedProject); - setProjectTaskMaster(normalizedProject?.taskmaster ?? null); + // Looks up projects by DB `projectId`; the legacy folder-derived `name` + // field has been removed from Project post-migration. + const applyTaskMasterInfo = useCallback((projectId: string, taskMasterInfo: TaskMasterProjectInfo | null) => { + setProjectTaskMaster(taskMasterInfo); - // Project-scoped task data is reset immediately to avoid stale task rendering. - setTasks([]); - setNextTask(null); + setProjects((previousProjects) => + previousProjects.map((project) => { + if (project.projectId !== projectId) { + return project; + } + + return enrichProject({ + ...project, + taskmaster: taskMasterInfo ?? undefined, + }); + }), + ); + + setCurrentProjectState((previousProject) => { + if (!previousProject || previousProject.projectId !== projectId) { + return previousProject; + } + + return enrichProject({ + ...previousProject, + taskmaster: taskMasterInfo ?? undefined, + }); + }); }, []); + const refreshCurrentProjectTaskMaster = useCallback( + async (projectId: string) => { + if (!projectId || !user || !token) { + return; + } + + const requestSequence = ++taskMasterRequestSeqRef.current; + + try { + const response = await api.projectTaskmaster(projectId); + if (!response.ok) { + throw new Error(`Failed to fetch TaskMaster details: ${response.status}`); + } + + const data = (await response.json()) as { taskmaster?: TaskMasterProjectInfo }; + const resolvedTaskMasterInfo = data.taskmaster ?? 
null; + + if ( + requestSequence !== taskMasterRequestSeqRef.current + || currentProjectIdRef.current !== projectId + ) { + return; + } + + applyTaskMasterInfo(projectId, resolvedTaskMasterInfo); + } catch (caughtError) { + if ( + requestSequence !== taskMasterRequestSeqRef.current + || currentProjectIdRef.current !== projectId + ) { + return; + } + + handleError('load selected project TaskMaster info', caughtError); + } + }, + [applyTaskMasterInfo, handleError, token, user], + ); + + const setCurrentProject = useCallback( + (project: TaskMasterProjectInput) => { + const normalizedProject = project ? enrichProject(project as TaskMasterProject) : null; + setCurrentProjectState(normalizedProject); + setProjectTaskMaster(normalizedProject?.taskmaster ?? null); + + // Project-scoped task data is reset immediately to avoid stale task rendering. + setTasks([]); + setNextTask(null); + + // `projectId` is the DB primary key used for every TaskMaster API call. + if (!normalizedProject?.projectId) { + taskMasterRequestSeqRef.current += 1; + return; + } + + void refreshCurrentProjectTaskMaster(normalizedProject.projectId); + }, + [refreshCurrentProjectTaskMaster], + ); + const refreshProjects = useCallback(async () => { if (!user || !token) { setProjects([]); @@ -121,27 +210,67 @@ export function TaskMasterProvider({ children }: { children: React.ReactNode }) const loadedProjects = Array.isArray(data) ? (data as TaskMasterProject[]) : []; const enrichedProjects = loadedProjects.map((project) => enrichProject(project)); - setProjects(enrichedProjects); + setProjects((previousProjects) => { + // Cache is keyed by `projectId` (DB primary key) post-migration. + const taskMasterByProjectId = new Map( + previousProjects + .filter((project) => Boolean(project.taskmaster)) + .map((project) => [project.projectId, project.taskmaster]), + ); - const currentProjectName = currentProjectNameRef.current; - if (!currentProjectName) { + return enrichedProjects.map((project) => { + const cachedTaskMasterInfo = taskMasterByProjectId.get(project.projectId); + if (!cachedTaskMasterInfo) { + return project; + } + + return enrichProject({ + ...project, + taskmaster: cachedTaskMasterInfo, + }); + }); + }); + + const currentProjectId = currentProjectIdRef.current; + if (!currentProjectId) { return; } - const matchingProject = enrichedProjects.find((project) => project.name === currentProjectName) ?? null; - setCurrentProjectState(matchingProject); - setProjectTaskMaster(matchingProject?.taskmaster ?? null); + const matchingProject = enrichedProjects.find((project) => project.projectId === currentProjectId) ?? null; + + if (!matchingProject) { + taskMasterRequestSeqRef.current += 1; + setCurrentProjectState(null); + setProjectTaskMaster(null); + setTasks([]); + setNextTask(null); + return; + } + + const cachedTaskMasterInfo = matchingProject.taskmaster ?? projectTaskMasterRef.current ?? null; + setCurrentProjectState( + cachedTaskMasterInfo + ? 
enrichProject({ + ...matchingProject, + taskmaster: cachedTaskMasterInfo, + }) + : matchingProject, + ); + setProjectTaskMaster(cachedTaskMasterInfo); + + void refreshCurrentProjectTaskMaster(currentProjectId); } catch (caughtError) { handleError('load projects', caughtError); } finally { setIsLoading(false); } - }, [clearError, handleError, token, user]); + }, [clearError, handleError, refreshCurrentProjectTaskMaster, token, user]); const refreshTasks = useCallback(async () => { - const projectName = currentProject?.name; + // TaskMaster tasks endpoint now lives under /api/taskmaster/tasks/:projectId. + const projectId = currentProject?.projectId; - if (!projectName || !user || !token) { + if (!projectId || !user || !token) { setTasks([]); setNextTask(null); return; @@ -151,7 +280,7 @@ export function TaskMasterProvider({ children }: { children: React.ReactNode }) setIsLoadingTasks(true); clearError(); - const response = await api.get(`/taskmaster/tasks/${encodeURIComponent(projectName)}`); + const response = await api.get(`/taskmaster/tasks/${encodeURIComponent(projectId)}`); if (!response.ok) { const errorPayload = (await response.json()) as { message?: string }; throw new Error(errorPayload.message ?? 'Failed to load tasks'); @@ -169,7 +298,7 @@ export function TaskMasterProvider({ children }: { children: React.ReactNode }) } finally { setIsLoadingTasks(false); } - }, [clearError, currentProject?.name, handleError, token, user]); + }, [clearError, currentProject?.projectId, handleError, token, user]); const refreshMCPStatus = useCallback(async () => { if (!user || !token) { @@ -204,10 +333,10 @@ export function TaskMasterProvider({ children }: { children: React.ReactNode }) }, [isAuthLoading, refreshMCPStatus, refreshProjects, token, user]); useEffect(() => { - if (currentProject?.name && user && token) { + if (currentProject?.projectId && user && token) { void refreshTasks(); } - }, [currentProject?.name, refreshTasks, token, user]); + }, [currentProject?.projectId, refreshTasks, token, user]); useEffect(() => { const message = latestMessage as TaskMasterWebSocketMessage | null; @@ -215,12 +344,16 @@ export function TaskMasterProvider({ children }: { children: React.ReactNode }) return; } - if (message.type === 'taskmaster-project-updated' && message.projectName) { + // Broadcasts now identify projects by `projectId` (see taskmaster-websocket.js). 
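+    // For reference, a sketch of the broadcast payload this handler expects
+    // (id value illustrative):
+    //   { "type": "taskmaster-project-updated", "projectId": "a1b2c3" }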
+ if (message.type === 'taskmaster-project-updated' && message.projectId) { + if (message.projectId === currentProjectIdRef.current) { + void refreshCurrentProjectTaskMaster(message.projectId); + } void refreshProjects(); return; } - if (message.type === 'taskmaster-tasks-updated' && message.projectName === currentProject?.name) { + if (message.type === 'taskmaster-tasks-updated' && message.projectId === currentProject?.projectId) { void refreshTasks(); return; } @@ -228,7 +361,7 @@ export function TaskMasterProvider({ children }: { children: React.ReactNode }) if (message.type === 'taskmaster-mcp-status-changed') { void refreshMCPStatus(); } - }, [currentProject?.name, latestMessage, refreshMCPStatus, refreshProjects, refreshTasks]); + }, [currentProject?.projectId, latestMessage, refreshCurrentProjectTaskMaster, refreshMCPStatus, refreshProjects, refreshTasks]); const contextValue = useMemo( () => ({ diff --git a/src/components/task-master/hooks/useProjectPrdFiles.ts b/src/components/task-master/hooks/useProjectPrdFiles.ts index 36ffa92a..e34b859b 100644 --- a/src/components/task-master/hooks/useProjectPrdFiles.ts +++ b/src/components/task-master/hooks/useProjectPrdFiles.ts @@ -3,7 +3,8 @@ import { api } from '../../../utils/api'; import type { PrdFile } from '../types'; type UseProjectPrdFilesOptions = { - projectName?: string; + // DB primary key of the project (post migration). + projectId?: string; }; type PrdResponse = { @@ -23,19 +24,19 @@ function normalizePrdResponse(responseData: PrdResponse): PrdFile[] { return []; } -export function useProjectPrdFiles({ projectName }: UseProjectPrdFilesOptions) { +export function useProjectPrdFiles({ projectId }: UseProjectPrdFilesOptions) { const [prdFiles, setPrdFiles] = useState([]); const [isLoadingPrdFiles, setIsLoadingPrdFiles] = useState(false); const refreshPrdFiles = useCallback(async () => { - if (!projectName) { + if (!projectId) { setPrdFiles([]); return; } try { setIsLoadingPrdFiles(true); - const response = await api.get(`/taskmaster/prd/${encodeURIComponent(projectName)}`); + const response = await api.get(`/taskmaster/prd/${encodeURIComponent(projectId)}`); if (!response.ok) { setPrdFiles([]); @@ -50,7 +51,7 @@ export function useProjectPrdFiles({ projectName }: UseProjectPrdFilesOptions) { } finally { setIsLoadingPrdFiles(false); } - }, [projectName]); + }, [projectId]); useEffect(() => { void refreshPrdFiles(); diff --git a/src/components/task-master/types.ts b/src/components/task-master/types.ts index bfbebbe2..fda63716 100644 --- a/src/components/task-master/types.ts +++ b/src/components/task-master/types.ts @@ -90,7 +90,8 @@ export type TaskMasterMcpStatus = { export type TaskMasterWebSocketMessage = { type?: string; - projectName?: string; + // Post-migration TaskMaster broadcasts identify projects by `projectId`. + projectId?: string; [key: string]: unknown; }; diff --git a/src/components/task-master/view/TaskBoard.tsx b/src/components/task-master/view/TaskBoard.tsx index 392cf15d..22a85a3f 100644 --- a/src/components/task-master/view/TaskBoard.tsx +++ b/src/components/task-master/view/TaskBoard.tsx @@ -72,13 +72,14 @@ export default function TaskBoard({ ); const loadPrdAndOpenEditor = async (prd: PrdFile) => { - if (!currentProject?.name) { + // Projects are addressed by DB projectId; see the projectName → projectId migration. 
+ if (!currentProject?.projectId) { return; } try { const response = await api.get( - `/taskmaster/prd/${encodeURIComponent(currentProject.name)}/${encodeURIComponent(prd.name)}`, + `/taskmaster/prd/${encodeURIComponent(currentProject.projectId)}/${encodeURIComponent(prd.name)}`, ); if (!response.ok) { diff --git a/src/components/task-master/view/TaskMasterPanel.tsx b/src/components/task-master/view/TaskMasterPanel.tsx index 78e9763c..8381cfdd 100644 --- a/src/components/task-master/view/TaskMasterPanel.tsx +++ b/src/components/task-master/view/TaskMasterPanel.tsx @@ -24,7 +24,7 @@ export default function TaskMasterPanel({ isVisible }: TaskMasterPanelProps) { const [prdNotification, setPrdNotification] = useState(null); const notificationTimeoutRef = useRef(null); - const { prdFiles, refreshPrdFiles } = useProjectPrdFiles({ projectName: currentProject?.name }); + const { prdFiles, refreshPrdFiles } = useProjectPrdFiles({ projectId: currentProject?.projectId }); const showPrdNotification = useCallback((message: string) => { if (notificationTimeoutRef.current) { diff --git a/src/constants/config.ts b/src/constants/config.ts index 853cfb43..6aa7885b 100644 --- a/src/constants/config.ts +++ b/src/constants/config.ts @@ -5,12 +5,16 @@ export const IS_PLATFORM = import.meta.env.VITE_IS_PLATFORM === 'true'; /** - * For empty shell instances where no project is provided, - * we use a default project object to ensure the shell can still function. + * For empty shell instances where no project is provided, + * we use a default project object to ensure the shell can still function. * This prevents errors related to missing project data. + * + * `projectId` is set to a well-known sentinel ('default') because the empty + * shell doesn't correspond to any real project row in the database; any API + * call that routes through this placeholder must tolerate a missing match. */ export const DEFAULT_PROJECT_FOR_EMPTY_SHELL = { - name: 'default', + projectId: 'default', displayName: 'default', fullPath: IS_PLATFORM ? '/workspace' : '', path: IS_PLATFORM ? '/workspace' : '', diff --git a/src/hooks/useProjectsState.ts b/src/hooks/useProjectsState.ts index 28cf682e..c1c9344c 100644 --- a/src/hooks/useProjectsState.ts +++ b/src/hooks/useProjectsState.ts @@ -1,5 +1,6 @@ import { useCallback, useEffect, useMemo, useRef, useState } from 'react'; import type { NavigateFunction } from 'react-router-dom'; + import { api } from '../utils/api'; import type { AppSocketMessage, @@ -40,9 +41,10 @@ const projectsHaveChanges = ( } const baseChanged = - nextProject.name !== prevProject.name || + nextProject.projectId !== prevProject.projectId || nextProject.displayName !== prevProject.displayName || nextProject.fullPath !== prevProject.fullPath || + Boolean(nextProject.isStarred) !== Boolean(prevProject.isStarred) || serialize(nextProject.sessionMeta) !== serialize(prevProject.sessionMeta) || serialize(nextProject.sessions) !== serialize(prevProject.sessions) || serialize(nextProject.taskmaster) !== serialize(prevProject.taskmaster); @@ -63,6 +65,32 @@ const projectsHaveChanges = ( }); }; +const mergeTaskMasterCache = (nextProjects: Project[], previousProjects: Project[]): Project[] => { + if (previousProjects.length === 0) { + return nextProjects; + } + + // Keyed by `projectId` (the DB primary key) so caches stay correct across + // renames and other mutations that might have changed the display name. 
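+  // Sketch of the merge semantics (values illustrative): given
+  //   previous = [{ projectId: 'a', taskmaster: info }]
+  //   next     = [{ projectId: 'a' }]
+  // the merged result keeps `taskmaster: info` on project 'a'.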
+ const previousTaskMasterByProject = new Map( + previousProjects + .filter((project) => Boolean(project.taskmaster)) + .map((project) => [project.projectId, project.taskmaster]), + ); + + return nextProjects.map((project) => { + const cachedTaskMasterInfo = previousTaskMasterByProject.get(project.projectId); + if (!cachedTaskMasterInfo) { + return project; + } + + return { + ...project, + taskmaster: cachedTaskMasterInfo, + }; + }); +}; + const getProjectSessions = (project: Project): ProjectSession[] => { return [ ...(project.sessions ?? []), @@ -72,6 +100,86 @@ const getProjectSessions = (project: Project): ProjectSession[] => { ]; }; +const countLoadedProjectSessions = (project: Project): number => getProjectSessions(project).length; + +const mergeSessionProviderLists = (baseSessions: ProjectSession[], additionalSessions: ProjectSession[]): ProjectSession[] => { + const merged = [...baseSessions]; + const seenSessionIds = new Set(baseSessions.map((session) => String(session.id))); + + for (const session of additionalSessions) { + const sessionId = String(session.id); + if (seenSessionIds.has(sessionId)) { + continue; + } + + merged.push(session); + seenSessionIds.add(sessionId); + } + + return merged; +}; + +const mergeExpandedSessionPages = (previousProjects: Project[], incomingProjects: Project[]): Project[] => { + if (previousProjects.length === 0) { + return incomingProjects; + } + + const previousByProjectId = new Map(previousProjects.map((project) => [project.projectId, project])); + + return incomingProjects.map((incomingProject) => { + const previousProject = previousByProjectId.get(incomingProject.projectId); + if (!previousProject) { + return incomingProject; + } + + const previousLoadedCount = countLoadedProjectSessions(previousProject); + const incomingLoadedCount = countLoadedProjectSessions(incomingProject); + if (previousLoadedCount <= incomingLoadedCount) { + return incomingProject; + } + + const mergedProject: Project = { + ...incomingProject, + sessions: mergeSessionProviderLists(incomingProject.sessions ?? [], previousProject.sessions ?? []), + cursorSessions: mergeSessionProviderLists(incomingProject.cursorSessions ?? [], previousProject.cursorSessions ?? []), + codexSessions: mergeSessionProviderLists(incomingProject.codexSessions ?? [], previousProject.codexSessions ?? []), + geminiSessions: mergeSessionProviderLists(incomingProject.geminiSessions ?? [], previousProject.geminiSessions ?? []), + }; + + const totalSessions = Number(incomingProject.sessionMeta?.total ?? previousLoadedCount); + mergedProject.sessionMeta = { + ...incomingProject.sessionMeta, + total: totalSessions, + hasMore: countLoadedProjectSessions(mergedProject) < totalSessions, + }; + + return mergedProject; + }); +}; + +const mergeProjectSessionPage = ( + existingProject: Project, + sessionsPage: Pick, +): Project => { + const mergedProject: Project = { + ...existingProject, + sessions: mergeSessionProviderLists(existingProject.sessions ?? [], sessionsPage.sessions ?? []), + cursorSessions: mergeSessionProviderLists(existingProject.cursorSessions ?? [], sessionsPage.cursorSessions ?? []), + codexSessions: mergeSessionProviderLists(existingProject.codexSessions ?? [], sessionsPage.codexSessions ?? []), + geminiSessions: mergeSessionProviderLists(existingProject.geminiSessions ?? [], sessionsPage.geminiSessions ?? []), + }; + + const totalSessions = Number(sessionsPage.sessionMeta?.total ?? existingProject.sessionMeta?.total ?? 
0); + mergedProject.sessionMeta = { + ...existingProject.sessionMeta, + ...sessionsPage.sessionMeta, + total: totalSessions, + hasMore: countLoadedProjectSessions(mergedProject) < totalSessions, + }; + + return mergedProject; +}; + const isUpdateAdditive = ( currentProjects: Project[], updatedProjects: Project[], @@ -82,8 +190,8 @@ const isUpdateAdditive = ( return true; } - const currentSelectedProject = currentProjects.find((project) => project.name === selectedProject.name); - const updatedSelectedProject = updatedProjects.find((project) => project.name === selectedProject.name); + const currentSelectedProject = currentProjects.find((project) => project.projectId === selectedProject.projectId); + const updatedSelectedProject = updatedProjects.find((project) => project.projectId === selectedProject.projectId); if (!currentSelectedProject || !updatedSelectedProject) { return false; @@ -155,6 +263,7 @@ export function useProjectsState({ const [externalMessageUpdate, setExternalMessageUpdate] = useState(0); const loadingProgressTimeoutRef = useRef | null>(null); + const lastHandledMessageRef = useRef(null); const fetchProjects = useCallback(async ({ showLoadingState = true }: FetchProjectsOptions = {}) => { try { @@ -165,12 +274,15 @@ export function useProjectsState({ const projectData = (await response.json()) as Project[]; setProjects((prevProjects) => { + const projectsWithTaskMaster = mergeTaskMasterCache(projectData, prevProjects); + const mergedProjects = mergeExpandedSessionPages(prevProjects, projectsWithTaskMaster); + if (prevProjects.length === 0) { - return projectData; + return mergedProjects; } - return projectsHaveChanges(prevProjects, projectData, true) - ? projectData + return projectsHaveChanges(prevProjects, mergedProjects, true) + ? mergedProjects : prevProjects; }); } catch (error) { @@ -187,6 +299,48 @@ export function useProjectsState({ await fetchProjects({ showLoadingState: false }); }, [fetchProjects]); + // Hydrates TaskMaster details for the given `projectId`. The project + // identifier comes directly from the DB-driven /api/projects response. + const hydrateProjectTaskMaster = useCallback(async (projectId: string) => { + if (!projectId) { + return; + } + + try { + const response = await api.projectTaskmaster(projectId); + if (!response.ok) { + return; + } + + const data = (await response.json()) as { taskmaster?: Project['taskmaster'] }; + const taskMasterInfo = data.taskmaster; + if (!taskMasterInfo) { + return; + } + + setProjects((previousProjects) => + previousProjects.map((project) => + project.projectId === projectId + ? 
{ ...project, taskmaster: taskMasterInfo } + : project, + ), + ); + + setSelectedProject((previousProject) => { + if (!previousProject || previousProject.projectId !== projectId) { + return previousProject; + } + + return { + ...previousProject, + taskmaster: taskMasterInfo, + }; + }); + } catch (error) { + console.error(`Error fetching TaskMaster info for project ${projectId}:`, error); + } + }, []); + const openSettings = useCallback((tab = 'tools') => { setSettingsInitialTab(tab); setShowSettings(true); @@ -196,6 +350,14 @@ export function useProjectsState({ void fetchProjects(); }, [fetchProjects]); + useEffect(() => { + if (!selectedProject?.projectId) { + return; + } + + void hydrateProjectTaskMaster(selectedProject.projectId); + }, [hydrateProjectTaskMaster, selectedProject?.projectId]); + // Auto-select the project when there is only one, so the user lands on the new session page useEffect(() => { if (!isLoadingProjects && projects.length === 1 && !selectedProject && !sessionId) { @@ -208,6 +370,15 @@ export function useProjectsState({ return; } + // `latestMessage` is event-like data. This effect also depends on local state + // (`projects`, `selectedProject`, `selectedSession`) to compute derived updates. + // Without this guard, handling one websocket message can update that local + // state, retrigger the effect, and re-handle the same websocket message. + if (lastHandledMessageRef.current === latestMessage) { + return; + } + lastHandledMessageRef.current = latestMessage; + if (latestMessage.type === 'loading_progress') { if (loadingProgressTimeoutRef.current) { clearTimeout(loadingProgressTimeoutRef.current); @@ -232,20 +403,12 @@ export function useProjectsState({ const projectsMessage = latestMessage as ProjectsUpdatedMessage; - if (projectsMessage.changedFile && selectedSession && selectedProject) { - const normalized = projectsMessage.changedFile.replace(/\\/g, '/'); - const changedFileParts = normalized.split('/'); + if (projectsMessage.updatedSessionId && selectedSession && selectedProject) { + if (projectsMessage.updatedSessionId === selectedSession.id) { + const isSessionActive = activeSessions.has(selectedSession.id); - if (changedFileParts.length >= 2) { - const filename = changedFileParts[changedFileParts.length - 1]; - const changedSessionId = filename.replace('.jsonl', ''); - - if (changedSessionId === selectedSession.id) { - const isSessionActive = activeSessions.has(selectedSession.id); - - if (!isSessionActive) { - setExternalMessageUpdate((prev) => prev + 1); - } + if (!isSessionActive) { + setExternalMessageUpdate((prev) => prev + 1); } } } @@ -254,7 +417,8 @@ export function useProjectsState({ (selectedSession && activeSessions.has(selectedSession.id)) || (activeSessions.size > 0 && Array.from(activeSessions).some((id) => id.startsWith('new-session-'))); - const updatedProjects = projectsMessage.projects; + const updatedProjectsWithTaskMaster = mergeTaskMasterCache(projectsMessage.projects, projects); + const updatedProjects = mergeExpandedSessionPages(projects, updatedProjectsWithTaskMaster); if ( hasActiveSession && @@ -263,14 +427,16 @@ export function useProjectsState({ return; } - setProjects(updatedProjects); + setProjects((previousProjects) => + projectsHaveChanges(previousProjects, updatedProjects, true) ? 
updatedProjects : previousProjects, + ); if (!selectedProject) { return; } const updatedSelectedProject = updatedProjects.find( - (project) => project.name === selectedProject.name, + (project) => project.projectId === selectedProject.projectId, ); if (!updatedSelectedProject) { @@ -308,10 +474,11 @@ export function useProjectsState({ return; } + // Project membership is resolved through `projectId` after the migration. for (const project of projects) { const claudeSession = project.sessions?.find((session) => session.id === sessionId); if (claudeSession) { - const shouldUpdateProject = selectedProject?.name !== project.name; + const shouldUpdateProject = selectedProject?.projectId !== project.projectId; const shouldUpdateSession = selectedSession?.id !== sessionId || selectedSession.__provider !== 'claude'; @@ -326,7 +493,7 @@ export function useProjectsState({ const cursorSession = project.cursorSessions?.find((session) => session.id === sessionId); if (cursorSession) { - const shouldUpdateProject = selectedProject?.name !== project.name; + const shouldUpdateProject = selectedProject?.projectId !== project.projectId; const shouldUpdateSession = selectedSession?.id !== sessionId || selectedSession.__provider !== 'cursor'; @@ -341,7 +508,7 @@ export function useProjectsState({ const codexSession = project.codexSessions?.find((session) => session.id === sessionId); if (codexSession) { - const shouldUpdateProject = selectedProject?.name !== project.name; + const shouldUpdateProject = selectedProject?.projectId !== project.projectId; const shouldUpdateSession = selectedSession?.id !== sessionId || selectedSession.__provider !== 'codex'; @@ -356,7 +523,7 @@ export function useProjectsState({ const geminiSession = project.geminiSessions?.find((session) => session.id === sessionId); if (geminiSession) { - const shouldUpdateProject = selectedProject?.name !== project.name; + const shouldUpdateProject = selectedProject?.projectId !== project.projectId; const shouldUpdateSession = selectedSession?.id !== sessionId || selectedSession.__provider !== 'gemini'; @@ -369,7 +536,7 @@ export function useProjectsState({ return; } } - }, [sessionId, projects, selectedProject?.name, selectedSession?.id, selectedSession?.__provider]); + }, [sessionId, projects, selectedProject?.projectId, selectedSession?.id, selectedSession?.__provider]); const handleProjectSelect = useCallback( (project: Project) => { @@ -398,17 +565,21 @@ export function useProjectsState({ } if (isMobile) { - const sessionProjectName = session.__projectName; - const currentProjectName = selectedProject?.name; + // Sessions are tagged with the owning project's DB `projectId` when + // picked from the sidebar (see useSidebarController); compare against + // the current selection's `projectId` so we know whether to collapse + // the sidebar after navigation. + const sessionProjectId = session.__projectId; + const currentProjectId = selectedProject?.projectId; - if (sessionProjectName !== currentProjectName) { + if (sessionProjectId !== currentProjectId) { setSidebarOpen(false); } } navigate(`/session/${session.id}`); }, - [activeTab, isMobile, navigate, selectedProject?.name], + [activeTab, isMobile, navigate, selectedProject?.projectId], ); const handleNewSession = useCallback( @@ -433,14 +604,40 @@ export function useProjectsState({ } setProjects((prevProjects) => - prevProjects.map((project) => ({ - ...project, - sessions: project.sessions?.filter((session) => session.id !== sessionIdToDelete) ?? 
[], - sessionMeta: { + prevProjects.map((project) => { + const sessions = project.sessions?.filter((session) => session.id !== sessionIdToDelete) ?? []; + const cursorSessions = project.cursorSessions?.filter((session) => session.id !== sessionIdToDelete) ?? []; + const codexSessions = project.codexSessions?.filter((session) => session.id !== sessionIdToDelete) ?? []; + const geminiSessions = project.geminiSessions?.filter((session) => session.id !== sessionIdToDelete) ?? []; + + const removedFromProject = ( + sessions.length !== (project.sessions?.length ?? 0) + || cursorSessions.length !== (project.cursorSessions?.length ?? 0) + || codexSessions.length !== (project.codexSessions?.length ?? 0) + || geminiSessions.length !== (project.geminiSessions?.length ?? 0) + ); + + if (!removedFromProject) { + return project; + } + + const updatedProject: Project = { + ...project, + sessions, + cursorSessions, + codexSessions, + geminiSessions, + }; + + const totalSessions = Math.max(0, Number(project.sessionMeta?.total ?? 0) - 1); + updatedProject.sessionMeta = { ...project.sessionMeta, - total: Math.max(0, (project.sessionMeta?.total as number | undefined ?? 0) - 1), - }, - })), + total: totalSessions, + hasMore: countLoadedProjectSessions(updatedProject) < totalSessions, + }; + + return updatedProject; + }), ); }, [navigate, selectedSession?.id], @@ -450,16 +647,18 @@ export function useProjectsState({ try { const response = await api.projects(); const freshProjects = (await response.json()) as Project[]; + const projectsWithTaskMaster = mergeTaskMasterCache(freshProjects, projects); + const mergedProjects = mergeExpandedSessionPages(projects, projectsWithTaskMaster); setProjects((prevProjects) => - projectsHaveChanges(prevProjects, freshProjects, true) ? freshProjects : prevProjects, + projectsHaveChanges(prevProjects, mergedProjects, true) ? mergedProjects : prevProjects, ); if (!selectedProject) { return; } - const refreshedProject = freshProjects.find((project) => project.name === selectedProject.name); + const refreshedProject = mergedProjects.find((project) => project.projectId === selectedProject.projectId); if (!refreshedProject) { return; } @@ -490,19 +689,70 @@ export function useProjectsState({ } catch (error) { console.error('Error refreshing sidebar:', error); } - }, [selectedProject, selectedSession]); + }, [projects, selectedProject, selectedSession]); + const loadMoreProjectSessions = useCallback(async (projectId: string) => { + const project = projects.find((candidate) => candidate.projectId === projectId); + if (!project) { + return; + } + + const loadedCount = countLoadedProjectSessions(project); + const totalCount = Number(project.sessionMeta?.total ?? 0); + if (totalCount > 0 && loadedCount >= totalCount) { + return; + } + + const response = await api.projectSessions(projectId, { + limit: 20, + offset: loadedCount, + }); + + if (!response.ok) { + const payload = (await response.json().catch(() => ({}))) as { error?: string | { message?: string } }; + const errorPayload = payload.error; + const message = + typeof errorPayload === 'string' + ? errorPayload + : errorPayload && typeof errorPayload === 'object' && errorPayload.message + ? 
errorPayload.message + : `Failed to load more sessions for project ${projectId}`; + throw new Error(message); + } + + const sessionsPage = (await response.json()) as Pick; + + let mergedProjectForSelection: Project | null = null; + setProjects((previousProjects) => + previousProjects.map((candidate) => { + if (candidate.projectId !== projectId) { + return candidate; + } + + const mergedProject = mergeProjectSessionPage(candidate, sessionsPage); + mergedProjectForSelection = mergedProject; + return mergedProject; + }), + ); + + if (selectedProject?.projectId === projectId && mergedProjectForSelection) { + setSelectedProject(mergedProjectForSelection); + } + }, [projects, selectedProject?.projectId]); + + // `projectId` is the DB identifier passed from the sidebar's delete flow + // after the migration away from folder-derived project names. const handleProjectDelete = useCallback( - (projectName: string) => { - if (selectedProject?.name === projectName) { + (projectId: string) => { + if (selectedProject?.projectId === projectId) { setSelectedProject(null); setSelectedSession(null); navigate('/'); } - setProjects((prevProjects) => prevProjects.filter((project) => project.name !== projectName)); + setProjects((prevProjects) => prevProjects.filter((project) => project.projectId !== projectId)); }, - [navigate, selectedProject?.name], + [navigate, selectedProject?.projectId], ); const sidebarSharedProps = useMemo( @@ -514,6 +764,7 @@ export function useProjectsState({ onSessionSelect: handleSessionSelect, onNewSession: handleNewSession, onSessionDelete: handleSessionDelete, + onLoadMoreSessions: loadMoreProjectSessions, onProjectDelete: handleProjectDelete, isLoading: isLoadingProjects, loadingProgress, @@ -529,6 +780,7 @@ export function useProjectsState({ handleProjectDelete, handleProjectSelect, handleSessionDelete, + loadMoreProjectSessions, handleSessionSelect, handleSidebarRefresh, isLoadingProjects, @@ -566,6 +818,7 @@ export function useProjectsState({ handleSessionSelect, handleNewSession, handleSessionDelete, + loadMoreProjectSessions, handleProjectDelete, handleSidebarRefresh, }; diff --git a/src/i18n/config.js b/src/i18n/config.js index 16a9330e..7fcda777 100644 --- a/src/i18n/config.js +++ b/src/i18n/config.js @@ -187,7 +187,7 @@ i18n fallbackLng: 'en', // Enable debug mode in development (logs missing keys to console) - debug: import.meta.env.DEV, + debug: false, // Namespaces - load only what's needed ns: ['common', 'settings', 'auth', 'sidebar', 'chat', 'codeEditor', 'tasks'], diff --git a/src/i18n/locales/de/common.json b/src/i18n/locales/de/common.json index bc4c7473..94260cff 100644 --- a/src/i18n/locales/de/common.json +++ b/src/i18n/locales/de/common.json @@ -176,7 +176,6 @@ }, "step3": { "reviewConfig": "Konfiguration überprüfen", - "workspaceType": "Arbeitsbereichstyp:", "existingWorkspace": "Vorhandener Arbeitsbereich", "newWorkspace": "Neuer Arbeitsbereich", "path": "Pfad:", diff --git a/src/i18n/locales/en/common.json b/src/i18n/locales/en/common.json index 0d25fedf..f35eb3c5 100644 --- a/src/i18n/locales/en/common.json +++ b/src/i18n/locales/en/common.json @@ -176,7 +176,6 @@ }, "step3": { "reviewConfig": "Review Your Configuration", - "workspaceType": "Workspace Type:", "existingWorkspace": "Existing Workspace", "newWorkspace": "New Workspace", "path": "Path:", diff --git a/src/i18n/locales/it/common.json b/src/i18n/locales/it/common.json index ff890937..7993c69d 100644 --- a/src/i18n/locales/it/common.json +++ b/src/i18n/locales/it/common.json @@ -176,7 +176,6 @@ }, 
"step3": { "reviewConfig": "Rivedi la tua configurazione", - "workspaceType": "Tipo area di lavoro:", "existingWorkspace": "Area di lavoro esistente", "newWorkspace": "Nuova area di lavoro", "path": "Percorso:", diff --git a/src/i18n/locales/ja/common.json b/src/i18n/locales/ja/common.json index 097eb057..0651e25c 100644 --- a/src/i18n/locales/ja/common.json +++ b/src/i18n/locales/ja/common.json @@ -176,7 +176,6 @@ }, "step3": { "reviewConfig": "設定の確認", - "workspaceType": "ワークスペースの種類:", "existingWorkspace": "既存のワークスペース", "newWorkspace": "新しいワークスペース", "path": "パス:", diff --git a/src/i18n/locales/ko/common.json b/src/i18n/locales/ko/common.json index b3554401..fcf1de52 100644 --- a/src/i18n/locales/ko/common.json +++ b/src/i18n/locales/ko/common.json @@ -176,7 +176,6 @@ }, "step3": { "reviewConfig": "설정 검토", - "workspaceType": "워크스페이스 유형:", "existingWorkspace": "기존 워크스페이스", "newWorkspace": "새 워크스페이스", "path": "경로:", diff --git a/src/i18n/locales/ru/common.json b/src/i18n/locales/ru/common.json index 906b78eb..24331f4a 100644 --- a/src/i18n/locales/ru/common.json +++ b/src/i18n/locales/ru/common.json @@ -176,7 +176,6 @@ }, "step3": { "reviewConfig": "Проверьте вашу конфигурацию", - "workspaceType": "Тип рабочего пространства:", "existingWorkspace": "Существующее рабочее пространство", "newWorkspace": "Новое рабочее пространство", "path": "Путь:", diff --git a/src/i18n/locales/tr/common.json b/src/i18n/locales/tr/common.json index 2a254034..3b9a6d27 100644 --- a/src/i18n/locales/tr/common.json +++ b/src/i18n/locales/tr/common.json @@ -176,7 +176,6 @@ }, "step3": { "reviewConfig": "Yapılandırmanı Gözden Geçir", - "workspaceType": "Çalışma Alanı Türü:", "existingWorkspace": "Mevcut Çalışma Alanı", "newWorkspace": "Yeni Çalışma Alanı", "path": "Yol:", diff --git a/src/i18n/locales/zh-CN/common.json b/src/i18n/locales/zh-CN/common.json index 936fe5b2..05e0369d 100644 --- a/src/i18n/locales/zh-CN/common.json +++ b/src/i18n/locales/zh-CN/common.json @@ -176,7 +176,6 @@ }, "step3": { "reviewConfig": "查看您的配置", - "workspaceType": "工作区类型:", "existingWorkspace": "现有工作区", "newWorkspace": "新建工作区", "path": "路径:", diff --git a/src/stores/useSessionStore.ts b/src/stores/useSessionStore.ts index 5f6446be..ef581e12 100644 --- a/src/stores/useSessionStore.ts +++ b/src/stores/useSessionStore.ts @@ -8,8 +8,9 @@ */ import { useCallback, useMemo, useRef, useState } from 'react'; -import type { LLMProvider } from '../types/app'; + import { authenticatedFetch } from '../utils/api'; +import type { LLMProvider } from '../types/app'; // ─── NormalizedMessage (mirrors server/adapters/types.js) ──────────────────── @@ -164,13 +165,15 @@ export function useSessionStore() { const has = useCallback((sessionId: string) => storeRef.current.has(sessionId), []); /** - * Fetch messages from the unified endpoint and populate serverMessages. + * Fetch messages from the provider sessions endpoint and populate serverMessages. + * + * Provider and project metadata are resolved server-side from `sessionId`. 
diff --git a/src/stores/useSessionStore.ts b/src/stores/useSessionStore.ts
index 5f6446be..ef581e12 100644
--- a/src/stores/useSessionStore.ts
+++ b/src/stores/useSessionStore.ts
@@ -8,8 +8,9 @@
  */
 
 import { useCallback, useMemo, useRef, useState } from 'react';
-import type { LLMProvider } from '../types/app';
+
 import { authenticatedFetch } from '../utils/api';
+import type { LLMProvider } from '../types/app';
 
 // ─── NormalizedMessage (mirrors server/adapters/types.js) ────────────────────
 
@@ -164,13 +165,15 @@ export function useSessionStore() {
   const has = useCallback((sessionId: string) => storeRef.current.has(sessionId), []);
 
   /**
-   * Fetch messages from the unified endpoint and populate serverMessages.
+   * Fetch messages from the provider sessions endpoint and populate serverMessages.
+   *
+   * Provider and project metadata are resolved server-side from `sessionId`.
    */
   const fetchFromServer = useCallback(async (
     sessionId: string,
     opts: {
       provider?: LLMProvider;
-      projectName?: string;
+      projectId?: string;
       projectPath?: string;
       limit?: number | null;
       offset?: number;
@@ -182,16 +185,13 @@ export function useSessionStore() {
 
     try {
       const params = new URLSearchParams();
-      if (opts.provider) params.append('provider', opts.provider);
-      if (opts.projectName) params.append('projectName', opts.projectName);
-      if (opts.projectPath) params.append('projectPath', opts.projectPath);
       if (opts.limit !== null && opts.limit !== undefined) {
         params.append('limit', String(opts.limit));
         params.append('offset', String(opts.offset ?? 0));
       }
 
       const qs = params.toString();
-      const url = `/api/sessions/${encodeURIComponent(sessionId)}/messages${qs ? `?${qs}` : ''}`;
+      const url = `/api/providers/sessions/${encodeURIComponent(sessionId)}/messages${qs ? `?${qs}` : ''}`;
       const response = await authenticatedFetch(url);
 
       if (!response.ok) {
@@ -229,7 +229,7 @@ export function useSessionStore() {
     sessionId: string,
     opts: {
       provider?: LLMProvider;
-      projectName?: string;
+      projectId?: string;
       projectPath?: string;
       limit?: number;
     } = {},
@@ -238,15 +238,12 @@ export function useSessionStore() {
     if (!slot.hasMore) return slot;
 
     const params = new URLSearchParams();
-    if (opts.provider) params.append('provider', opts.provider);
-    if (opts.projectName) params.append('projectName', opts.projectName);
-    if (opts.projectPath) params.append('projectPath', opts.projectPath);
     const limit = opts.limit ?? 20;
     params.append('limit', String(limit));
     params.append('offset', String(slot.offset));
 
     const qs = params.toString();
-    const url = `/api/sessions/${encodeURIComponent(sessionId)}/messages${qs ? `?${qs}` : ''}`;
+    const url = `/api/providers/sessions/${encodeURIComponent(sessionId)}/messages${qs ? `?${qs}` : ''}`;
 
     try {
       const response = await authenticatedFetch(url);
@@ -298,25 +295,22 @@ export function useSessionStore() {
   }, [getSlot, notify]);
 
   /**
-   * Re-fetch serverMessages from the unified endpoint (e.g., on projects_updated).
+   * Re-fetch serverMessages from the provider sessions endpoint.
    */
   const refreshFromServer = useCallback(async (
     sessionId: string,
-    opts: {
+    _opts: {
       provider?: LLMProvider;
-      projectName?: string;
+      projectId?: string;
       projectPath?: string;
     } = {},
   ) => {
     const slot = getSlot(sessionId);
 
     try {
       const params = new URLSearchParams();
-      if (opts.provider) params.append('provider', opts.provider);
-      if (opts.projectName) params.append('projectName', opts.projectName);
-      if (opts.projectPath) params.append('projectPath', opts.projectPath);
       const qs = params.toString();
-      const url = `/api/sessions/${encodeURIComponent(sessionId)}/messages${qs ? `?${qs}` : ''}`;
+      const url = `/api/providers/sessions/${encodeURIComponent(sessionId)}/messages${qs ? `?${qs}` : ''}`;
       const response = await authenticatedFetch(url);
 
       if (!response.ok) throw new Error(`HTTP ${response.status}`);
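The store above now talks to a single messages route for every provider. A minimal standalone sketch of the same call, for reference; the JSON payload shape (a page of the store's `NormalizedMessage` objects) is assumed rather than shown in this diff:

```ts
import { authenticatedFetch } from '../utils/api';

// Fetch one page of persisted messages. The server infers provider and
// project from the session id, so only paging params go in the query string.
async function fetchSessionMessages(sessionId: string, limit = 20, offset = 0) {
  const params = new URLSearchParams({ limit: String(limit), offset: String(offset) });
  const response = await authenticatedFetch(
    `/api/providers/sessions/${encodeURIComponent(sessionId)}/messages?${params.toString()}`,
  );
  if (!response.ok) throw new Error(`HTTP ${response.status}`);
  return response.json();
}
```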
diff --git a/src/types/app.ts b/src/types/app.ts
index 4c2f230d..364807f7 100644
--- a/src/types/app.ts
+++ b/src/types/app.ts
@@ -13,7 +13,9 @@ export interface ProjectSession {
   lastActivity?: string;
   messageCount?: number;
   __provider?: LLMProvider;
-  __projectName?: string;
+  // Tags the session with the owning project's DB `projectId` so UI handlers
+  // (session switching, sidebar focus, etc.) can match against selectedProject.
+  __projectId?: string;
   [key: string]: unknown;
 }
 
@@ -30,11 +32,16 @@ export interface ProjectTaskmasterInfo {
   [key: string]: unknown;
 }
 
+// After the projectName → projectId migration the backend no longer returns a
+// folder-derived `name` string. Projects are now addressed everywhere by the
+// DB-assigned `projectId` (primary key in the `projects` table), and the UI
+// uses the same identifier for routing, state keys and API calls.
 export interface Project {
-  name: string;
+  projectId: string;
   displayName: string;
   fullPath: string;
   path?: string;
+  isStarred?: boolean;
   sessions?: ProjectSession[];
   cursorSessions?: ProjectSession[];
   codexSessions?: ProjectSession[];
@@ -56,7 +63,13 @@ export interface LoadingProgress {
 
 export interface ProjectsUpdatedMessage {
   type: 'projects_updated';
   projects: Project[];
-  changedFile?: string;
+  updatedSessionId?: string;
+  updatedSessionIds?: string[];
+  watchProvider?: LLMProvider;
+  watchProviders?: LLMProvider[];
+  changeType?: 'add' | 'change';
+  changeTypes?: Array<'add' | 'change'>;
+  batched?: boolean;
   [key: string]: unknown;
 }
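The widened `ProjectsUpdatedMessage` carries both singular and batched variants of the same fields. A consumer can collapse them as below; this is a sketch, and whether a batched message also sets the singular fields is not specified in this diff, so the array form is simply preferred when present:

```ts
import type { ProjectsUpdatedMessage } from '../types/app';

// Normalize the singular/batched session-id pair into one array for handlers.
function sessionIdsFrom(message: ProjectsUpdatedMessage): string[] {
  if (message.updatedSessionIds?.length) return message.updatedSessionIds;
  return message.updatedSessionId ? [message.updatedSessionId] : [];
}
```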
diff --git a/src/utils/api.js b/src/utils/api.js
index 3968daf9..0ac8d426 100644
--- a/src/utils/api.js
+++ b/src/utils/api.js
@@ -51,50 +51,48 @@ export const api = {
   // Protected endpoints
   // config endpoint removed - no longer needed (frontend uses window.location)
 
+  // After the projectName → projectId migration the path/query identifier is
+  // the DB-assigned `projectId`; parameter names reflect that for clarity.
   projects: () => authenticatedFetch('/api/projects'),
-  sessions: (projectName, limit = 5, offset = 0) =>
-    authenticatedFetch(`/api/projects/${projectName}/sessions?limit=${limit}&offset=${offset}`),
-  // Unified endpoint — all providers through one URL
-  unifiedSessionMessages: (sessionId, provider = 'claude', { projectName = '', projectPath = '', limit = null, offset = 0 } = {}) => {
+  projectSessions: (projectId, { limit = 20, offset = 0 } = {}) => {
+    const params = new URLSearchParams();
+    params.set('limit', String(limit));
+    params.set('offset', String(offset));
+    return authenticatedFetch(`/api/projects/${encodeURIComponent(projectId)}/sessions?${params.toString()}`);
+  },
+  projectTaskmaster: (projectId) =>
+    authenticatedFetch(`/api/projects/${encodeURIComponent(projectId)}/taskmaster`),
+  // Unified endpoint for persisted session messages.
+  // Provider/project metadata are resolved by the backend from sessionId.
+  unifiedSessionMessages: (sessionId, _provider = 'claude', { limit = null, offset = 0 } = {}) => {
     const params = new URLSearchParams();
-    params.append('provider', provider);
-    if (projectName) params.append('projectName', projectName);
-    if (projectPath) params.append('projectPath', projectPath);
     if (limit !== null) {
       params.append('limit', String(limit));
       params.append('offset', String(offset));
     }
     const queryString = params.toString();
-    return authenticatedFetch(`/api/sessions/${encodeURIComponent(sessionId)}/messages${queryString ? `?${queryString}` : ''}`);
+    return authenticatedFetch(`/api/providers/sessions/${encodeURIComponent(sessionId)}/messages${queryString ? `?${queryString}` : ''}`);
   },
-  renameProject: (projectName, displayName) =>
-    authenticatedFetch(`/api/projects/${projectName}/rename`, {
+  renameProject: (projectId, displayName) =>
+    authenticatedFetch(`/api/projects/${projectId}/rename`, {
       method: 'PUT',
       body: JSON.stringify({ displayName }),
     }),
-  deleteSession: (projectName, sessionId) =>
-    authenticatedFetch(`/api/projects/${projectName}/sessions/${sessionId}`, {
+  deleteSession: (sessionId) =>
+    authenticatedFetch(`/api/providers/sessions/${sessionId}`, {
      method: 'DELETE',
    }),
-  renameSession: (sessionId, summary, provider) =>
-    authenticatedFetch(`/api/sessions/${sessionId}/rename`, {
+  renameSession: (sessionId, summary) =>
+    authenticatedFetch(`/api/providers/sessions/${sessionId}`, {
      method: 'PUT',
-      body: JSON.stringify({ summary, provider }),
+      body: JSON.stringify({ summary }),
    }),
-  deleteCodexSession: (sessionId) =>
-    authenticatedFetch(`/api/codex/sessions/${sessionId}`, {
-      method: 'DELETE',
-    }),
-  deleteGeminiSession: (sessionId) =>
-    authenticatedFetch(`/api/gemini/sessions/${sessionId}`, {
-      method: 'DELETE',
-    }),
-  deleteProject: (projectName, force = false, deleteData = false) => {
+  // `hardDelete` maps to the server's `?force=true`: it removes the DB row,
+  // the Claude *.jsonl files, and the sessions rows for the project path.
+  deleteProject: (projectId, hardDelete = false) => {
     const params = new URLSearchParams();
-    if (force) params.set('force', 'true');
-    if (deleteData) params.set('deleteData', 'true');
+    if (hardDelete) params.set('force', 'true');
     const qs = params.toString();
-    return authenticatedFetch(`/api/projects/${projectName}${qs ? `?${qs}` : ''}`, {
+    return authenticatedFetch(`/api/projects/${projectId}${qs ? `?${qs}` : ''}`, {
       method: 'DELETE',
     });
   },
@@ -102,69 +100,78 @@ export const api = {
     const token = localStorage.getItem('auth-token');
     const params = new URLSearchParams({ q: query, limit: String(limit) });
     if (token) params.set('token', token);
-    return `/api/search/conversations?${params.toString()}`;
+    return `/api/providers/search/sessions?${params.toString()}`;
   },
-  createWorkspace: (workspaceData) =>
-    authenticatedFetch('/api/projects/create-workspace', {
+  createProject: (projectData) =>
+    authenticatedFetch('/api/projects/create-project', {
       method: 'POST',
-      body: JSON.stringify(workspaceData),
+      body: JSON.stringify(projectData),
     }),
-  readFile: (projectName, filePath) =>
-    authenticatedFetch(`/api/projects/${projectName}/file?filePath=${encodeURIComponent(filePath)}`),
-  readFileBlob: (projectName, filePath) =>
-    authenticatedFetch(`/api/projects/${projectName}/files/content?path=${encodeURIComponent(filePath)}`),
-  saveFile: (projectName, filePath, content) =>
-    authenticatedFetch(`/api/projects/${projectName}/file`, {
+  migrateLegacyProjectStars: (projectIds) =>
+    authenticatedFetch('/api/projects/migrate-legacy-stars', {
+      method: 'POST',
+      body: JSON.stringify({ projectIds }),
+    }),
+  toggleProjectStar: (projectId) =>
+    authenticatedFetch(`/api/projects/${encodeURIComponent(projectId)}/toggle-star`, {
+      method: 'POST',
+    }),
+  readFile: (projectId, filePath) =>
+    authenticatedFetch(`/api/projects/${projectId}/file?filePath=${encodeURIComponent(filePath)}`),
+  readFileBlob: (projectId, filePath) =>
+    authenticatedFetch(`/api/projects/${projectId}/files/content?path=${encodeURIComponent(filePath)}`),
+  saveFile: (projectId, filePath, content) =>
+    authenticatedFetch(`/api/projects/${projectId}/file`, {
       method: 'PUT',
       body: JSON.stringify({ filePath, content }),
     }),
-  getFiles: (projectName, options = {}) =>
-    authenticatedFetch(`/api/projects/${projectName}/files`, options),
+  getFiles: (projectId, options = {}) =>
+    authenticatedFetch(`/api/projects/${projectId}/files`, options),
 
   // File operations
-  createFile: (projectName, { path, type, name }) =>
-    authenticatedFetch(`/api/projects/${projectName}/files/create`, {
+  createFile: (projectId, { path, type, name }) =>
+    authenticatedFetch(`/api/projects/${projectId}/files/create`, {
       method: 'POST',
       body: JSON.stringify({ path, type, name }),
     }),
-  renameFile: (projectName, { oldPath, newName }) =>
-    authenticatedFetch(`/api/projects/${projectName}/files/rename`, {
+  renameFile: (projectId, { oldPath, newName }) =>
+    authenticatedFetch(`/api/projects/${projectId}/files/rename`, {
       method: 'PUT',
       body: JSON.stringify({ oldPath, newName }),
     }),
-  deleteFile: (projectName, { path, type }) =>
-    authenticatedFetch(`/api/projects/${projectName}/files`, {
+  deleteFile: (projectId, { path, type }) =>
+    authenticatedFetch(`/api/projects/${projectId}/files`, {
       method: 'DELETE',
       body: JSON.stringify({ path, type }),
     }),
-  uploadFiles: (projectName, formData) =>
-    authenticatedFetch(`/api/projects/${projectName}/files/upload`, {
+  uploadFiles: (projectId, formData) =>
+    authenticatedFetch(`/api/projects/${projectId}/files/upload`, {
       method: 'POST',
       body: formData,
       headers: {}, // Let browser set Content-Type for FormData
     }),
 
-  // TaskMaster endpoints
+  // TaskMaster endpoints — all addressed by DB projectId post-migration.
   taskmaster: {
     // Initialize TaskMaster in a project
-    init: (projectName) =>
-      authenticatedFetch(`/api/taskmaster/init/${projectName}`, {
+    init: (projectId) =>
+      authenticatedFetch(`/api/taskmaster/init/${projectId}`, {
        method: 'POST',
      }),
 
    // Add a new task
-    addTask: (projectName, { prompt, title, description, priority, dependencies }) =>
-      authenticatedFetch(`/api/taskmaster/add-task/${projectName}`, {
+    addTask: (projectId, { prompt, title, description, priority, dependencies }) =>
+      authenticatedFetch(`/api/taskmaster/add-task/${projectId}`, {
        method: 'POST',
        body: JSON.stringify({ prompt, title, description, priority, dependencies }),
      }),
 
    // Parse PRD to generate tasks
-    parsePRD: (projectName, { fileName, numTasks, append }) =>
-      authenticatedFetch(`/api/taskmaster/parse-prd/${projectName}`, {
+    parsePRD: (projectId, { fileName, numTasks, append }) =>
+      authenticatedFetch(`/api/taskmaster/parse-prd/${projectId}`, {
        method: 'POST',
        body: JSON.stringify({ fileName, numTasks, append }),
      }),
@@ -174,15 +181,15 @@ export const api = {
      authenticatedFetch('/api/taskmaster/prd-templates'),
 
    // Apply a PRD template
-    applyTemplate: (projectName, { templateId, fileName, customizations }) =>
-      authenticatedFetch(`/api/taskmaster/apply-template/${projectName}`, {
+    applyTemplate: (projectId, { templateId, fileName, customizations }) =>
+      authenticatedFetch(`/api/taskmaster/apply-template/${projectId}`, {
        method: 'POST',
        body: JSON.stringify({ templateId, fileName, customizations }),
      }),
 
    // Update a task
-    updateTask: (projectName, taskId, updates) =>
-      authenticatedFetch(`/api/taskmaster/update-task/${projectName}/${taskId}`, {
+    updateTask: (projectId, taskId, updates) =>
+      authenticatedFetch(`/api/taskmaster/update-task/${projectId}/${taskId}`, {
        method: 'PUT',
        body: JSON.stringify(updates),
      }),
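Taken together, every helper in this file is now keyed by the DB-assigned projectId. A call-site sketch, assuming a component that holds a `Project` from the projects state:

```ts
import { api } from '../utils/api';
import type { Project } from '../types/app';

async function hardDeleteProject(project: Project) {
  // `true` maps to the server's `?force=true` hard-delete path described above.
  const response = await api.deleteProject(project.projectId, true);
  if (!response.ok) throw new Error(`HTTP ${response.status}`);
}
```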