mirror of
https://github.com/siteboon/claudecodeui.git
synced 2026-05-16 01:12:46 +00:00
Compare commits
11 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
df3d5de8c1 | ||
|
|
b44c93d884 | ||
|
|
a1c6d667a4 | ||
|
|
0753c04783 | ||
|
|
e1275e6d3c | ||
|
|
ccb8b83692 | ||
|
|
641731b3ef | ||
|
|
d4bdc667cc | ||
|
|
ce724e6e3f | ||
|
|
b4a39c7297 | ||
|
|
44edf94f3a |
12
CHANGELOG.md
12
CHANGELOG.md
@@ -3,6 +3,18 @@
|
||||
All notable changes to CloudCLI UI will be documented in this file.
|
||||
|
||||
|
||||
## [1.31.2](https://github.com/siteboon/claudecodeui/compare/v1.31.0...v1.31.2) (2026-04-30)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* migrations for new sqlite schema ([0753c04](https://github.com/siteboon/claudecodeui/commit/0753c047837dab17b86ae4453027e30b465870f8))
|
||||
|
||||
## [1.31.0](https://github.com/siteboon/claudecodeui/compare/v1.30.0...v1.31.0) (2026-04-30)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **/status:** use CLAUDE_MODELS.DEFAULT instead of stale 'claude-sonnet-4.5' fallback ([#723](https://github.com/siteboon/claudecodeui/issues/723)) ([b4a39c7](https://github.com/siteboon/claudecodeui/commit/b4a39c729710a6294c62eb742e99e05f3e3914e9))
|
||||
|
||||
## [1.30.0](https://github.com/siteboon/claudecodeui/compare/v1.29.5...v1.30.0) (2026-04-21)
|
||||
|
||||
### New Features
|
||||
|
||||
@@ -164,7 +164,7 @@ CloudCLI has a plugin system that lets you add custom tabs with their own fronte
|
||||
|---|---|
|
||||
| **[Project Stats](https://github.com/cloudcli-ai/cloudcli-plugin-starter)** | Shows file counts, lines of code, file-type breakdown, largest files, and recently modified files for your current project |
|
||||
| **[Web Terminal](https://github.com/cloudcli-ai/cloudcli-plugin-terminal)** | Full xterm.js terminal with multi-tab support |
|
||||
|
||||
| **[CloudCLI Scheduler](https://github.com/grostim/cloudcli-cron)** | Create workspace-scoped scheduled prompts and execute them through a local CLI such as Codex, Claude Code, or Gemini CLI |
|
||||
### Build Your Own
|
||||
|
||||
**[Plugin Starter Template →](https://github.com/cloudcli-ai/cloudcli-plugin-starter)** — fork this repo to create your own plugin. It includes a working example with frontend rendering, live context updates, and RPC communication to a backend server.
|
||||
|
||||
@@ -165,9 +165,8 @@ export default tseslint.config(
|
||||
pattern: [
|
||||
"server/projects.js",
|
||||
"server/sessionManager.js",
|
||||
"server/database/*.{js,ts}",
|
||||
"server/utils/runtime-paths.js",
|
||||
], // provider history loading still resolves session data through these legacy runtime/database files
|
||||
], // provider history loading still resolves session data through these legacy runtime files
|
||||
mode: "file",
|
||||
},
|
||||
{
|
||||
|
||||
63
package-lock.json
generated
63
package-lock.json
generated
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "@cloudcli-ai/cloudcli",
|
||||
"version": "1.30.0",
|
||||
"version": "1.31.2",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@cloudcli-ai/cloudcli",
|
||||
"version": "1.30.0",
|
||||
"version": "1.31.2",
|
||||
"hasInstallScript": true,
|
||||
"license": "AGPL-3.0-or-later",
|
||||
"dependencies": {
|
||||
@@ -25,6 +25,7 @@
|
||||
"@replit/codemirror-minimap": "^0.5.2",
|
||||
"@tailwindcss/typography": "^0.5.16",
|
||||
"@uiw/react-codemirror": "^4.23.13",
|
||||
"@vscode/ripgrep": "^1.17.1",
|
||||
"@xterm/addon-clipboard": "^0.1.0",
|
||||
"@xterm/addon-fit": "^0.10.0",
|
||||
"@xterm/addon-web-links": "^0.11.0",
|
||||
@@ -80,6 +81,7 @@
|
||||
"@types/node": "^22.19.7",
|
||||
"@types/react": "^18.2.43",
|
||||
"@types/react-dom": "^18.2.17",
|
||||
"@types/ws": "^8.18.1",
|
||||
"@vitejs/plugin-react": "^4.6.0",
|
||||
"auto-changelog": "^2.5.0",
|
||||
"autoprefixer": "^10.4.16",
|
||||
@@ -4142,6 +4144,16 @@
|
||||
"integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@types/ws": {
|
||||
"version": "8.18.1",
|
||||
"resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.18.1.tgz",
|
||||
"integrity": "sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/node": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/eslint-plugin": {
|
||||
"version": "8.56.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.56.1.tgz",
|
||||
@@ -4786,6 +4798,18 @@
|
||||
"vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@vscode/ripgrep": {
|
||||
"version": "1.17.1",
|
||||
"resolved": "https://registry.npmjs.org/@vscode/ripgrep/-/ripgrep-1.17.1.tgz",
|
||||
"integrity": "sha512-xTs7DGyAO3IsJYOCTBP8LnTvPiYVKEuyv8s0xyJDBXfs8rhBfqnZPvb6xDT+RnwWzcXqW27xLS/aGrkjX7lNWw==",
|
||||
"hasInstallScript": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"https-proxy-agent": "^7.0.2",
|
||||
"proxy-from-env": "^1.1.0",
|
||||
"yauzl": "^2.9.2"
|
||||
}
|
||||
},
|
||||
"node_modules/@xterm/addon-clipboard": {
|
||||
"version": "0.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@xterm/addon-clipboard/-/addon-clipboard-0.1.0.tgz",
|
||||
@@ -5618,6 +5642,15 @@
|
||||
"ieee754": "^1.1.13"
|
||||
}
|
||||
},
|
||||
"node_modules/buffer-crc32": {
|
||||
"version": "0.2.13",
|
||||
"resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz",
|
||||
"integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/buffer-equal-constant-time": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz",
|
||||
@@ -8277,6 +8310,15 @@
|
||||
"walk-up-path": "^4.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/fd-slicer": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz",
|
||||
"integrity": "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"pend": "~1.2.0"
|
||||
}
|
||||
},
|
||||
"node_modules/file-entry-cache": {
|
||||
"version": "8.0.0",
|
||||
"resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz",
|
||||
@@ -13381,6 +13423,12 @@
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/pend": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz",
|
||||
"integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/perfect-debounce": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/perfect-debounce/-/perfect-debounce-2.0.0.tgz",
|
||||
@@ -13774,7 +13822,6 @@
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
|
||||
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/pump": {
|
||||
@@ -18225,6 +18272,16 @@
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/yauzl": {
|
||||
"version": "2.10.0",
|
||||
"resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz",
|
||||
"integrity": "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"buffer-crc32": "~0.2.3",
|
||||
"fd-slicer": "~1.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/yocto-queue": {
|
||||
"version": "0.1.0",
|
||||
"resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@cloudcli-ai/cloudcli",
|
||||
"version": "1.30.0",
|
||||
"version": "1.31.2",
|
||||
"description": "A web-based UI for Claude Code CLI",
|
||||
"type": "module",
|
||||
"main": "dist-server/server/index.js",
|
||||
@@ -80,6 +80,7 @@
|
||||
"@replit/codemirror-minimap": "^0.5.2",
|
||||
"@tailwindcss/typography": "^0.5.16",
|
||||
"@uiw/react-codemirror": "^4.23.13",
|
||||
"@vscode/ripgrep": "^1.17.1",
|
||||
"@xterm/addon-clipboard": "^0.1.0",
|
||||
"@xterm/addon-fit": "^0.10.0",
|
||||
"@xterm/addon-web-links": "^0.11.0",
|
||||
@@ -132,6 +133,7 @@
|
||||
"@types/node": "^22.19.7",
|
||||
"@types/react": "^18.2.43",
|
||||
"@types/react-dom": "^18.2.17",
|
||||
"@types/ws": "^8.18.1",
|
||||
"@vitejs/plugin-react": "^4.6.0",
|
||||
"auto-changelog": "^2.5.0",
|
||||
"autoprefixer": "^10.4.16",
|
||||
|
||||
@@ -1,593 +0,0 @@
|
||||
import Database from 'better-sqlite3';
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
import crypto from 'crypto';
|
||||
import { findAppRoot, getModuleDir } from '../utils/runtime-paths.js';
|
||||
import {
|
||||
APP_CONFIG_TABLE_SQL,
|
||||
USER_NOTIFICATION_PREFERENCES_TABLE_SQL,
|
||||
VAPID_KEYS_TABLE_SQL,
|
||||
PUSH_SUBSCRIPTIONS_TABLE_SQL,
|
||||
SESSION_NAMES_TABLE_SQL,
|
||||
SESSION_NAMES_LOOKUP_INDEX_SQL,
|
||||
DATABASE_SCHEMA_SQL
|
||||
} from './schema.js';
|
||||
|
||||
const __dirname = getModuleDir(import.meta.url);
// The compiled backend lives under dist-server/server/database, but the install root we log
// should still point at the project/app root. Resolving it here avoids build-layout drift.
const APP_ROOT = findAppRoot(__dirname);

// ANSI color codes for terminal output
const colors = {
  reset: '\x1b[0m',
  bright: '\x1b[1m',
  cyan: '\x1b[36m',
  dim: '\x1b[2m',
};

// Tiny helpers that wrap text in an ANSI style and always reset afterwards.
const c = {
  info: (text) => `${colors.cyan}${text}${colors.reset}`,
  bright: (text) => `${colors.bright}${text}${colors.reset}`,
  dim: (text) => `${colors.dim}${text}${colors.reset}`,
};

// Use DATABASE_PATH environment variable if set, otherwise use default location
const DB_PATH = process.env.DATABASE_PATH || path.join(__dirname, 'auth.db');

// Ensure database directory exists if custom path is provided
if (process.env.DATABASE_PATH) {
  const dbDir = path.dirname(DB_PATH);
  try {
    if (!fs.existsSync(dbDir)) {
      fs.mkdirSync(dbDir, { recursive: true });
      console.log(`Created database directory: ${dbDir}`);
    }
  } catch (error) {
    // Fatal: without the directory the Database() constructor below would fail anyway.
    console.error(`Failed to create database directory ${dbDir}:`, error.message);
    throw error;
  }
}

// As part of 1.19.2 we introduced a new location for auth.db. The block below migrates an
// existing legacy database from the install directory to the new location.
const LEGACY_DB_PATH = path.join(__dirname, 'auth.db');
if (DB_PATH !== LEGACY_DB_PATH && !fs.existsSync(DB_PATH) && fs.existsSync(LEGACY_DB_PATH)) {
  try {
    fs.copyFileSync(LEGACY_DB_PATH, DB_PATH);
    console.log(`[MIGRATION] Copied database from ${LEGACY_DB_PATH} to ${DB_PATH}`);
    // Also carry over SQLite WAL/shared-memory sidecar files when present.
    for (const suffix of ['-wal', '-shm']) {
      if (fs.existsSync(LEGACY_DB_PATH + suffix)) {
        fs.copyFileSync(LEGACY_DB_PATH + suffix, DB_PATH + suffix);
      }
    }
  } catch (err) {
    // Non-fatal: a fresh database will simply be created at DB_PATH instead.
    console.warn(`[MIGRATION] Could not copy legacy database: ${err.message}`);
  }
}

// Create database connection
const db = new Database(DB_PATH);

// app_config must exist before any other module imports (auth.js reads the JWT secret at load time).
// runMigrations() also creates this table, but it runs too late for existing installations
// where auth.js is imported before initializeDatabase() is called.
db.exec(APP_CONFIG_TABLE_SQL);

// Show app installation path prominently
const appInstallPath = APP_ROOT;
console.log('');
console.log(c.dim('═'.repeat(60)));
console.log(`${c.info('[INFO]')} App Installation: ${c.bright(appInstallPath)}`);
console.log(`${c.info('[INFO]')} Database: ${c.dim(path.relative(appInstallPath, DB_PATH))}`);
if (process.env.DATABASE_PATH) {
  console.log(`   ${c.dim('(Using custom DATABASE_PATH from environment)')}`);
}
console.log(c.dim('═'.repeat(60)));
console.log('');
|
||||
|
||||
// Apply in-place schema migrations. Idempotent: safe to run on every startup
// because column additions are guarded by PRAGMA table_info and the table/index
// DDL below is all IF NOT EXISTS.
const runMigrations = () => {
  try {
    const existingColumns = new Set(
      db.prepare("PRAGMA table_info(users)").all().map((col) => col.name)
    );

    // Columns introduced after the initial users schema shipped.
    const columnMigrations = [
      ['git_name', 'ALTER TABLE users ADD COLUMN git_name TEXT'],
      ['git_email', 'ALTER TABLE users ADD COLUMN git_email TEXT'],
      ['has_completed_onboarding', 'ALTER TABLE users ADD COLUMN has_completed_onboarding BOOLEAN DEFAULT 0'],
    ];
    for (const [column, ddl] of columnMigrations) {
      if (!existingColumns.has(column)) {
        console.log(`Running migration: Adding ${column} column`);
        db.exec(ddl);
      }
    }

    // Tables/indexes added in later releases (all CREATE ... IF NOT EXISTS).
    for (const ddl of [
      USER_NOTIFICATION_PREFERENCES_TABLE_SQL,
      VAPID_KEYS_TABLE_SQL,
      PUSH_SUBSCRIPTIONS_TABLE_SQL,
      APP_CONFIG_TABLE_SQL,
      SESSION_NAMES_TABLE_SQL,
      SESSION_NAMES_LOOKUP_INDEX_SQL,
    ]) {
      db.exec(ddl);
    }

    console.log('Database migrations completed successfully');
  } catch (error) {
    console.error('Error running migrations:', error.message);
    throw error;
  }
};
|
||||
|
||||
// Initialize database with schema, then run migrations so older installs pick
// up columns/tables added after their database was first created.
// Note: async only for interface stability — better-sqlite3 is synchronous.
const initializeDatabase = async () => {
  try {
    db.exec(DATABASE_SCHEMA_SQL);
    console.log('Database initialized successfully');
    runMigrations();
  } catch (error) {
    // Rethrow after logging: the server cannot run without a working database.
    console.error('Error initializing database:', error.message);
    throw error;
  }
};
|
||||
|
||||
// User database operations.
// NOTE: better-sqlite3 is synchronous and throws directly, so the previous
// `try { ... } catch (err) { throw err }` wrappers were no-ops and have been
// removed. Errors still propagate to callers unchanged. The one deliberate
// exception is updateLastLogin, which logs and continues (non-critical write).
const userDb = {
  // Check if any users exist (drives the initial setup/registration flow).
  hasUsers: () => {
    const row = db.prepare('SELECT COUNT(*) as count FROM users').get();
    return row.count > 0;
  },

  // Create a new user; returns { id, username }.
  createUser: (username, passwordHash) => {
    const stmt = db.prepare('INSERT INTO users (username, password_hash) VALUES (?, ?)');
    const result = stmt.run(username, passwordHash);
    return { id: result.lastInsertRowid, username };
  },

  // Get an active user's full row by username (includes password_hash for login checks).
  getUserByUsername: (username) => {
    return db.prepare('SELECT * FROM users WHERE username = ? AND is_active = 1').get(username);
  },

  // Update last login time (non-fatal — logged but not thrown).
  updateLastLogin: (userId) => {
    try {
      db.prepare('UPDATE users SET last_login = CURRENT_TIMESTAMP WHERE id = ?').run(userId);
    } catch (err) {
      console.warn('Failed to update last login:', err.message);
    }
  },

  // Get a safe (no password_hash) subset of fields by ID; active users only.
  getUserById: (userId) => {
    return db.prepare('SELECT id, username, created_at, last_login FROM users WHERE id = ? AND is_active = 1').get(userId);
  },

  // First active user, if any — used by single-user deployments.
  getFirstUser: () => {
    return db.prepare('SELECT id, username, created_at, last_login FROM users WHERE is_active = 1 LIMIT 1').get();
  },

  // Store the git identity (user.name / user.email) associated with the user.
  updateGitConfig: (userId, gitName, gitEmail) => {
    db.prepare('UPDATE users SET git_name = ?, git_email = ? WHERE id = ?').run(gitName, gitEmail, userId);
  },

  // Read the stored git identity; returns { git_name, git_email } or undefined.
  getGitConfig: (userId) => {
    return db.prepare('SELECT git_name, git_email FROM users WHERE id = ?').get(userId);
  },

  // Mark onboarding as finished for the user.
  completeOnboarding: (userId) => {
    db.prepare('UPDATE users SET has_completed_onboarding = 1 WHERE id = ?').run(userId);
  },

  // True only when the flag is exactly 1 (missing user => false).
  hasCompletedOnboarding: (userId) => {
    const row = db.prepare('SELECT has_completed_onboarding FROM users WHERE id = ?').get(userId);
    return row?.has_completed_onboarding === 1;
  }
};
|
||||
|
||||
// API Keys database operations.
// better-sqlite3 throws synchronously, so the former rethrow-only try/catch
// wrappers were no-ops and have been removed; error behavior is unchanged.
const apiKeysDb = {
  // Generate a new API key: 'ck_' prefix + 64 hex chars (32 random bytes).
  generateApiKey: () => {
    return 'ck_' + crypto.randomBytes(32).toString('hex');
  },

  // Create a new API key for a user; returns { id, keyName, apiKey }.
  createApiKey: (userId, keyName) => {
    const apiKey = apiKeysDb.generateApiKey();
    const result = db
      .prepare('INSERT INTO api_keys (user_id, key_name, api_key) VALUES (?, ?, ?)')
      .run(userId, keyName, apiKey);
    return { id: result.lastInsertRowid, keyName, apiKey };
  },

  // Get all API keys for a user, newest first.
  getApiKeys: (userId) => {
    return db.prepare('SELECT id, key_name, api_key, created_at, last_used, is_active FROM api_keys WHERE user_id = ? ORDER BY created_at DESC').all(userId);
  },

  // Validate an API key and return its owning user ({ id, username, api_key_id })
  // or undefined. Bumps last_used on a successful match.
  validateApiKey: (apiKey) => {
    const row = db.prepare(`
      SELECT u.id, u.username, ak.id as api_key_id
      FROM api_keys ak
      JOIN users u ON ak.user_id = u.id
      WHERE ak.api_key = ? AND ak.is_active = 1 AND u.is_active = 1
    `).get(apiKey);

    if (row) {
      // Update last_used timestamp
      db.prepare('UPDATE api_keys SET last_used = CURRENT_TIMESTAMP WHERE id = ?').run(row.api_key_id);
    }

    return row;
  },

  // Delete an API key owned by the user; returns true if a row was removed.
  deleteApiKey: (userId, apiKeyId) => {
    const result = db.prepare('DELETE FROM api_keys WHERE id = ? AND user_id = ?').run(apiKeyId, userId);
    return result.changes > 0;
  },

  // Toggle API key active status; returns true if a row was updated.
  toggleApiKey: (userId, apiKeyId, isActive) => {
    const result = db
      .prepare('UPDATE api_keys SET is_active = ? WHERE id = ? AND user_id = ?')
      .run(isActive ? 1 : 0, apiKeyId, userId);
    return result.changes > 0;
  }
};
|
||||
|
||||
// User credentials database operations (for GitHub tokens, GitLab tokens, etc.).
// better-sqlite3 throws synchronously, so the former rethrow-only try/catch
// wrappers were no-ops and have been removed; error behavior is unchanged.
const credentialsDb = {
  // Create a new credential; returns { id, credentialName, credentialType }.
  createCredential: (userId, credentialName, credentialType, credentialValue, description = null) => {
    const result = db
      .prepare('INSERT INTO user_credentials (user_id, credential_name, credential_type, credential_value, description) VALUES (?, ?, ?, ?, ?)')
      .run(userId, credentialName, credentialType, credentialValue, description);
    return { id: result.lastInsertRowid, credentialName, credentialType };
  },

  // List a user's credentials (metadata only — credential_value is never
  // returned here), optionally filtered by type, newest first.
  getCredentials: (userId, credentialType = null) => {
    let query = 'SELECT id, credential_name, credential_type, description, created_at, is_active FROM user_credentials WHERE user_id = ?';
    const params = [userId];

    if (credentialType) {
      query += ' AND credential_type = ?';
      params.push(credentialType);
    }

    query += ' ORDER BY created_at DESC';

    return db.prepare(query).all(...params);
  },

  // Most recent active credential value for a user/type, or null.
  getActiveCredential: (userId, credentialType) => {
    const row = db.prepare('SELECT credential_value FROM user_credentials WHERE user_id = ? AND credential_type = ? AND is_active = 1 ORDER BY created_at DESC LIMIT 1').get(userId, credentialType);
    return row?.credential_value || null;
  },

  // Delete a credential owned by the user; returns true if a row was removed.
  deleteCredential: (userId, credentialId) => {
    const result = db.prepare('DELETE FROM user_credentials WHERE id = ? AND user_id = ?').run(credentialId, userId);
    return result.changes > 0;
  },

  // Toggle credential active status; returns true if a row was updated.
  toggleCredential: (userId, credentialId, isActive) => {
    const result = db
      .prepare('UPDATE user_credentials SET is_active = ? WHERE id = ? AND user_id = ?')
      .run(isActive ? 1 : 0, credentialId, userId);
    return result.changes > 0;
  }
};
|
||||
|
||||
// Factory defaults: every delivery channel off, every event type on.
const DEFAULT_NOTIFICATION_PREFERENCES = {
  channels: {
    inApp: false,
    webPush: false
  },
  events: {
    actionRequired: true,
    stop: true,
    error: true
  }
};

// Coerce arbitrary stored or user-supplied input into a well-formed preferences
// object. Channels are opt-in (only a literal `true` enables them); events are
// opt-out (anything but a literal `false` keeps them enabled). Non-object input
// normalizes to the defaults.
const normalizeNotificationPreferences = (value) => {
  const input = value && typeof value === 'object' ? value : {};
  const channels = input.channels ?? {};
  const events = input.events ?? {};

  return {
    channels: {
      inApp: channels.inApp === true,
      webPush: channels.webPush === true
    },
    events: {
      actionRequired: events.actionRequired !== false,
      stop: events.stop !== false,
      error: events.error !== false
    }
  };
};
|
||||
|
||||
// Per-user notification preferences, stored as a JSON blob keyed by user_id.
// The outer rethrow-only try/catch wrappers were no-ops and have been removed;
// the inner JSON.parse fallback (deliberate) is kept.
const notificationPreferencesDb = {
  // Read preferences for a user. On first access the defaults are persisted and
  // returned; malformed stored JSON falls back to the defaults. Output is always
  // normalized, so callers get a fully-populated shape.
  getPreferences: (userId) => {
    const row = db.prepare('SELECT preferences_json FROM user_notification_preferences WHERE user_id = ?').get(userId);
    if (!row) {
      const defaults = normalizeNotificationPreferences(DEFAULT_NOTIFICATION_PREFERENCES);
      db.prepare(
        'INSERT INTO user_notification_preferences (user_id, preferences_json, updated_at) VALUES (?, ?, CURRENT_TIMESTAMP)'
      ).run(userId, JSON.stringify(defaults));
      return defaults;
    }

    let parsed;
    try {
      parsed = JSON.parse(row.preferences_json);
    } catch {
      // Corrupt JSON in the DB: fall back to defaults rather than crashing.
      parsed = DEFAULT_NOTIFICATION_PREFERENCES;
    }
    return normalizeNotificationPreferences(parsed);
  },

  // Upsert preferences for a user; returns the normalized object that was stored.
  updatePreferences: (userId, preferences) => {
    const normalized = normalizeNotificationPreferences(preferences);
    db.prepare(
      `INSERT INTO user_notification_preferences (user_id, preferences_json, updated_at)
       VALUES (?, ?, CURRENT_TIMESTAMP)
       ON CONFLICT(user_id) DO UPDATE SET
         preferences_json = excluded.preferences_json,
         updated_at = CURRENT_TIMESTAMP`
    ).run(userId, JSON.stringify(normalized));
    return normalized;
  }
};
|
||||
|
||||
// Web-push subscription storage (one row per browser endpoint).
// Rethrow-only try/catch wrappers were no-ops and have been removed;
// better-sqlite3 errors still propagate to callers unchanged.
const pushSubscriptionsDb = {
  // Insert or refresh a subscription. An existing endpoint is re-bound to the
  // given user and its keys updated (endpoint has a UNIQUE constraint).
  saveSubscription: (userId, endpoint, keysP256dh, keysAuth) => {
    db.prepare(
      `INSERT INTO push_subscriptions (user_id, endpoint, keys_p256dh, keys_auth)
       VALUES (?, ?, ?, ?)
       ON CONFLICT(endpoint) DO UPDATE SET
         user_id = excluded.user_id,
         keys_p256dh = excluded.keys_p256dh,
         keys_auth = excluded.keys_auth`
    ).run(userId, endpoint, keysP256dh, keysAuth);
  },

  // All subscriptions registered for a user.
  getSubscriptions: (userId) => {
    return db.prepare('SELECT endpoint, keys_p256dh, keys_auth FROM push_subscriptions WHERE user_id = ?').all(userId);
  },

  // Remove a single subscription by endpoint.
  removeSubscription: (endpoint) => {
    db.prepare('DELETE FROM push_subscriptions WHERE endpoint = ?').run(endpoint);
  },

  // Remove every subscription belonging to a user.
  removeAllForUser: (userId) => {
    db.prepare('DELETE FROM push_subscriptions WHERE user_id = ?').run(userId);
  }
};
|
||||
|
||||
// Session custom names database operations.
// A row in session_names overrides the CLI-generated summary for a
// (session_id, provider) pair; see applyCustomSessionNames below.
const sessionNamesDb = {
  // Set (insert or update) a custom session name
  setName: (sessionId, provider, customName) => {
    db.prepare(`
      INSERT INTO session_names (session_id, provider, custom_name)
      VALUES (?, ?, ?)
      ON CONFLICT(session_id, provider)
      DO UPDATE SET custom_name = excluded.custom_name, updated_at = CURRENT_TIMESTAMP
    `).run(sessionId, provider, customName);
  },

  // Get a single custom session name; null when none is set.
  getName: (sessionId, provider) => {
    const row = db.prepare(
      'SELECT custom_name FROM session_names WHERE session_id = ? AND provider = ?'
    ).get(sessionId, provider);
    return row?.custom_name || null;
  },

  // Batch lookup — returns Map<sessionId, customName>.
  // The IN-list placeholders are generated ('?' per id) and values are bound,
  // so dynamic SQL here carries no injection risk.
  getNames: (sessionIds, provider) => {
    if (!sessionIds.length) return new Map();
    const placeholders = sessionIds.map(() => '?').join(',');
    const rows = db.prepare(
      `SELECT session_id, custom_name FROM session_names
       WHERE session_id IN (${placeholders}) AND provider = ?`
    ).all(...sessionIds, provider);
    return new Map(rows.map(r => [r.session_id, r.custom_name]));
  },

  // Delete a custom session name; true if a row was removed.
  deleteName: (sessionId, provider) => {
    return db.prepare(
      'DELETE FROM session_names WHERE session_id = ? AND provider = ?'
    ).run(sessionId, provider).changes > 0;
  },
};
|
||||
|
||||
// Overlay user-assigned names from the database onto the given session list,
// mutating each session's `summary` in place (overrides CLI-generated text).
// Lookup failures are logged and ignored so session listing never breaks.
function applyCustomSessionNames(sessions, provider) {
  if (!sessions?.length) return;
  try {
    const nameById = sessionNamesDb.getNames(sessions.map((s) => s.id), provider);
    sessions.forEach((session) => {
      const customName = nameById.get(session.id);
      if (customName) {
        session.summary = customName;
      }
    });
  } catch (error) {
    console.warn(`[DB] Failed to apply custom session names for ${provider}:`, error.message);
  }
}
|
||||
|
||||
// App config database operations — a simple key/value store (app_config table).
const appConfigDb = {
  // Read a config value; returns null when the key is missing OR on any DB
  // error. The swallow is deliberate: this runs during early startup (auth.js
  // reads the JWT secret at import time) when the table may not exist yet.
  get: (key) => {
    try {
      const row = db.prepare('SELECT value FROM app_config WHERE key = ?').get(key);
      return row?.value || null;
    } catch (err) {
      return null;
    }
  },

  // Upsert a config value (errors propagate, unlike get).
  set: (key, value) => {
    db.prepare(
      'INSERT INTO app_config (key, value) VALUES (?, ?) ON CONFLICT(key) DO UPDATE SET value = excluded.value'
    ).run(key, value);
  },

  // Return the persisted JWT signing secret, generating and storing a
  // 64-byte random hex secret on first use.
  getOrCreateJwtSecret: () => {
    let secret = appConfigDb.get('jwt_secret');
    if (!secret) {
      secret = crypto.randomBytes(64).toString('hex');
      appConfigDb.set('jwt_secret', secret);
    }
    return secret;
  }
};
|
||||
|
||||
// Backward compatibility - keep old names pointing to new system.
// Each legacy GitHub-token helper delegates to credentialsDb with
// credential_type fixed to 'github_token'.
const githubTokensDb = {
  githubTokenType: undefined, // (no state — all methods are pure delegates)

  createGithubToken(userId, tokenName, githubToken, description = null) {
    return credentialsDb.createCredential(userId, tokenName, 'github_token', githubToken, description);
  },

  getGithubTokens(userId) {
    return credentialsDb.getCredentials(userId, 'github_token');
  },

  getActiveGithubToken(userId) {
    return credentialsDb.getActiveCredential(userId, 'github_token');
  },

  deleteGithubToken(userId, tokenId) {
    return credentialsDb.deleteCredential(userId, tokenId);
  },

  toggleGithubToken(userId, tokenId, isActive) {
    return credentialsDb.toggleCredential(userId, tokenId, isActive);
  }
};
|
||||
|
||||
// Public API of the database module.
export {
  db,
  initializeDatabase,
  userDb,
  apiKeysDb,
  credentialsDb,
  notificationPreferencesDb,
  pushSubscriptionsDb,
  sessionNamesDb,
  applyCustomSessionNames,
  appConfigDb,
  githubTokensDb // Backward compatibility
};
|
||||
@@ -1,102 +0,0 @@
|
||||
// Key/value store for app-level settings (e.g. the persisted JWT secret).
export const APP_CONFIG_TABLE_SQL = `CREATE TABLE IF NOT EXISTS app_config (
  key TEXT PRIMARY KEY,
  value TEXT NOT NULL,
  created_at DATETIME DEFAULT CURRENT_TIMESTAMP
);`;

// Per-user notification preferences stored as a JSON blob (one row per user).
export const USER_NOTIFICATION_PREFERENCES_TABLE_SQL = `CREATE TABLE IF NOT EXISTS user_notification_preferences (
  user_id INTEGER PRIMARY KEY,
  preferences_json TEXT NOT NULL,
  updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
  FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
);`;

// VAPID key pair(s) used for web push.
export const VAPID_KEYS_TABLE_SQL = `CREATE TABLE IF NOT EXISTS vapid_keys (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  public_key TEXT NOT NULL,
  private_key TEXT NOT NULL,
  created_at DATETIME DEFAULT CURRENT_TIMESTAMP
);`;

// Browser push subscriptions; endpoint is UNIQUE so saves can upsert on it.
export const PUSH_SUBSCRIPTIONS_TABLE_SQL = `CREATE TABLE IF NOT EXISTS push_subscriptions (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  user_id INTEGER NOT NULL,
  endpoint TEXT NOT NULL UNIQUE,
  keys_p256dh TEXT NOT NULL,
  keys_auth TEXT NOT NULL,
  created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
  FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
);`;

// Custom display names for sessions, unique per (session_id, provider) so
// upserts can target that pair.
export const SESSION_NAMES_TABLE_SQL = `CREATE TABLE IF NOT EXISTS session_names (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  session_id TEXT NOT NULL,
  provider TEXT NOT NULL DEFAULT 'claude',
  custom_name TEXT NOT NULL,
  created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
  updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
  UNIQUE(session_id, provider)
);`;

// Covering index for the (session_id, provider) lookups done by sessionNamesDb.
export const SESSION_NAMES_LOOKUP_INDEX_SQL = `CREATE INDEX IF NOT EXISTS idx_session_names_lookup ON session_names(session_id, provider);`;
|
||||
|
||||
export const DATABASE_SCHEMA_SQL = `PRAGMA foreign_keys = ON;
|
||||
|
||||
CREATE TABLE IF NOT EXISTS users (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
username TEXT UNIQUE NOT NULL,
|
||||
password_hash TEXT NOT NULL,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
last_login DATETIME,
|
||||
is_active BOOLEAN DEFAULT 1,
|
||||
git_name TEXT,
|
||||
git_email TEXT,
|
||||
has_completed_onboarding BOOLEAN DEFAULT 0
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_users_username ON users(username);
|
||||
CREATE INDEX IF NOT EXISTS idx_users_active ON users(is_active);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS api_keys (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
user_id INTEGER NOT NULL,
|
||||
key_name TEXT NOT NULL,
|
||||
api_key TEXT UNIQUE NOT NULL,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
last_used DATETIME,
|
||||
is_active BOOLEAN DEFAULT 1,
|
||||
FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_api_keys_key ON api_keys(api_key);
|
||||
CREATE INDEX IF NOT EXISTS idx_api_keys_user_id ON api_keys(user_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_api_keys_active ON api_keys(is_active);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS user_credentials (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
user_id INTEGER NOT NULL,
|
||||
credential_name TEXT NOT NULL,
|
||||
credential_type TEXT NOT NULL,
|
||||
credential_value TEXT NOT NULL,
|
||||
description TEXT,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
is_active BOOLEAN DEFAULT 1,
|
||||
FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_user_credentials_user_id ON user_credentials(user_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_user_credentials_type ON user_credentials(credential_type);
|
||||
CREATE INDEX IF NOT EXISTS idx_user_credentials_active ON user_credentials(is_active);
|
||||
|
||||
${USER_NOTIFICATION_PREFERENCES_TABLE_SQL}
|
||||
|
||||
${VAPID_KEYS_TABLE_SQL}
|
||||
|
||||
${PUSH_SUBSCRIPTIONS_TABLE_SQL}
|
||||
|
||||
${SESSION_NAMES_TABLE_SQL}
|
||||
|
||||
${SESSION_NAMES_LOOKUP_INDEX_SQL}
|
||||
|
||||
${APP_CONFIG_TABLE_SQL}
|
||||
`;
|
||||
1158
server/index.js
1158
server/index.js
File diff suppressed because it is too large
Load Diff
@@ -1,5 +1,5 @@
|
||||
import jwt from 'jsonwebtoken';
|
||||
import { userDb, appConfigDb } from '../database/db.js';
|
||||
import { userDb, appConfigDb } from '../modules/database/index.js';
|
||||
import { IS_PLATFORM } from '../constants/config.js';
|
||||
|
||||
// Use env var if set, otherwise auto-generate a unique secret per installation
|
||||
|
||||
143
server/modules/database/connection.ts
Normal file
143
server/modules/database/connection.ts
Normal file
@@ -0,0 +1,143 @@
|
||||
/**
|
||||
* Database connection management.
|
||||
*
|
||||
* Owns the single SQLite connection used across all repositories.
|
||||
* Handles path resolution, directory creation, legacy database migration,
|
||||
* and eager app_config bootstrap so the auth middleware can read the
|
||||
* JWT secret before the full schema is applied.
|
||||
*
|
||||
* Consumers should never create their own Database instance — they use
|
||||
* `getConnection()` to obtain the shared singleton.
|
||||
*/
|
||||
|
||||
import Database from 'better-sqlite3';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
import { APP_CONFIG_TABLE_SCHEMA_SQL } from '@/modules/database/schema.js';
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Path resolution
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Resolves the database file path from environment or falls back
|
||||
* to the legacy location inside the server/database/ folder.
|
||||
*
|
||||
* Priority:
|
||||
* 1. DATABASE_PATH environment variable (set by cli.js or load-env-vars.js)
|
||||
* 2. Legacy path: server/database/auth.db
|
||||
*/
|
||||
function resolveDatabasePath(): string {
|
||||
// process.env.DATABASE_PATH is set by load-env-vars.js to either the .env value or a default(~/.cloudcli/auth.db) in the user's home directory.
|
||||
return process.env.DATABASE_PATH || resolveLegacyDatabasePath();
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves the legacy database path (always inside server/database/).
|
||||
* Used for the one-time migration to the new external location.
|
||||
*/
|
||||
function resolveLegacyDatabasePath(): string {
|
||||
const serverDir = path.resolve(__dirname, '..', '..', '..');
|
||||
return path.join(serverDir, 'database', 'auth.db');
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Directory & migration helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function ensureDatabaseDirectory(dbPath: string): void {
|
||||
const dir = path.dirname(dbPath);
|
||||
if (!fs.existsSync(dir)) {
|
||||
fs.mkdirSync(dir, { recursive: true });
|
||||
console.log('Created database directory:', dir);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* If the database was moved to an external location (e.g. ~/.cloudcli/)
|
||||
* but the user still has a legacy auth.db inside the install directory,
|
||||
* copy it to the new location as a one-time migration.
|
||||
*/
|
||||
function migrateLegacyDatabase(targetPath: string): void {
|
||||
const legacyPath = resolveLegacyDatabasePath();
|
||||
|
||||
if (targetPath === legacyPath) return;
|
||||
if (fs.existsSync(targetPath)) return;
|
||||
if (!fs.existsSync(legacyPath)) return;
|
||||
|
||||
try {
|
||||
fs.copyFileSync(legacyPath, targetPath);
|
||||
console.log('Migrated legacy database', { from: legacyPath, to: targetPath });
|
||||
|
||||
|
||||
// copy the write-ahead log and shared memory files (auth.db-wal, auth.db-shm) if they exist, to preserve any uncommitted transactions
|
||||
for (const suffix of ['-wal', '-shm']) {
|
||||
const src = legacyPath + suffix;
|
||||
if (fs.existsSync(src)) {
|
||||
fs.copyFileSync(src, targetPath + suffix);
|
||||
}
|
||||
}
|
||||
} catch (err: any) {
|
||||
console.error('Could not migrate legacy database', { error: err.message });
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Singleton connection
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
let instance: Database.Database | null = null;
|
||||
|
||||
/**
|
||||
* Returns the shared database connection, creating it on first call.
|
||||
*
|
||||
* The first invocation:
|
||||
* 1. Resolves the target database path
|
||||
* 2. Ensures the parent directory exists
|
||||
* 3. Migrates from the legacy install-directory path if needed
|
||||
* 4. Opens the SQLite connection
|
||||
* 5. Eagerly creates the app_config table (auth reads JWT secret at import time)
|
||||
* 6. Logs the database location
|
||||
*/
|
||||
export function getConnection(): Database.Database {
|
||||
if (instance) return instance;
|
||||
|
||||
const dbPath = resolveDatabasePath();
|
||||
|
||||
ensureDatabaseDirectory(dbPath);
|
||||
migrateLegacyDatabase(dbPath);
|
||||
|
||||
instance = new Database(dbPath);
|
||||
|
||||
// app_config must exist immediately — the auth middleware reads
|
||||
// the JWT secret at module-load time, before initializeDatabase() runs.
|
||||
instance.exec(APP_CONFIG_TABLE_SCHEMA_SQL);
|
||||
|
||||
return instance;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the resolved database file path without opening a connection.
|
||||
* Useful for diagnostics and CLI status commands.
|
||||
*/
|
||||
export function getDatabasePath(): string {
|
||||
return resolveDatabasePath();
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes the database connection and clears the singleton.
|
||||
* Primarily used for graceful shutdown or testing.
|
||||
*/
|
||||
export function closeConnection(): void {
|
||||
if (instance) {
|
||||
instance.close();
|
||||
instance = null;
|
||||
console.log('Database connection closed');
|
||||
}
|
||||
}
|
||||
12
server/modules/database/index.ts
Normal file
12
server/modules/database/index.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
export { initializeDatabase } from '@/modules/database/init-db.js';
|
||||
export { apiKeysDb } from '@/modules/database/repositories/api-keys.js';
|
||||
export { appConfigDb } from '@/modules/database/repositories/app-config.js';
|
||||
export { credentialsDb } from '@/modules/database/repositories/credentials.js';
|
||||
export { githubTokensDb } from '@/modules/database/repositories/github-tokens.js';
|
||||
export { notificationPreferencesDb } from '@/modules/database/repositories/notification-preferences.js';
|
||||
export { projectsDb } from '@/modules/database/repositories/projects.db.js';
|
||||
export { pushSubscriptionsDb } from '@/modules/database/repositories/push-subscriptions.js';
|
||||
export { scanStateDb } from '@/modules/database/repositories/scan-state.db.js';
|
||||
export { sessionsDb } from '@/modules/database/repositories/sessions.db.js';
|
||||
export { userDb } from '@/modules/database/repositories/users.js';
|
||||
export { vapidKeysDb } from '@/modules/database/repositories/vapid-keys.js';
|
||||
17
server/modules/database/init-db.ts
Normal file
17
server/modules/database/init-db.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { getConnection } from "@/modules/database/connection.js";
|
||||
import { runMigrations } from "@/modules/database/migrations.js";
|
||||
import { INIT_SCHEMA_SQL } from "@/modules/database/schema.js";
|
||||
|
||||
// Initialize database with schema
|
||||
export const initializeDatabase = async () => {
|
||||
try {
|
||||
const db = getConnection();
|
||||
db.exec(INIT_SCHEMA_SQL);
|
||||
console.log('Database schema applied');
|
||||
runMigrations(db);
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
console.log('Database initialization failed', { error: message });
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
443
server/modules/database/migrations.ts
Normal file
443
server/modules/database/migrations.ts
Normal file
@@ -0,0 +1,443 @@
|
||||
import { Database } from 'better-sqlite3';
|
||||
|
||||
import {
|
||||
APP_CONFIG_TABLE_SCHEMA_SQL,
|
||||
LAST_SCANNED_AT_SQL,
|
||||
PROJECTS_TABLE_SCHEMA_SQL,
|
||||
PUSH_SUBSCRIPTIONS_TABLE_SCHEMA_SQL,
|
||||
SESSIONS_TABLE_SCHEMA_SQL,
|
||||
USER_NOTIFICATION_PREFERENCES_TABLE_SCHEMA_SQL,
|
||||
VAPID_KEYS_TABLE_SCHEMA_SQL,
|
||||
} from '@/modules/database/schema.js';
|
||||
|
||||
const SQLITE_UUID_SQL = `
|
||||
lower(hex(randomblob(4))) || '-' ||
|
||||
lower(hex(randomblob(2))) || '-' ||
|
||||
lower(hex(randomblob(2))) || '-' ||
|
||||
lower(hex(randomblob(2))) || '-' ||
|
||||
lower(hex(randomblob(6)))
|
||||
`;
|
||||
|
||||
type TableInfoRow = {
|
||||
name: string;
|
||||
pk: number;
|
||||
};
|
||||
|
||||
const addColumnToTableIfNotExists = (
|
||||
db: Database,
|
||||
tableName: string,
|
||||
columnNames: string[],
|
||||
columnName: string,
|
||||
columnType: string
|
||||
) => {
|
||||
if (!columnNames.includes(columnName)) {
|
||||
console.log(`Running migration: Adding ${columnName} column to ${tableName} table`);
|
||||
db.exec(`ALTER TABLE ${tableName} ADD COLUMN ${columnName} ${columnType}`);
|
||||
}
|
||||
};
|
||||
|
||||
const tableExists = (db: Database, tableName: string): boolean =>
|
||||
Boolean(
|
||||
db
|
||||
.prepare("SELECT name FROM sqlite_master WHERE type = 'table' AND name = ?")
|
||||
.get(tableName)
|
||||
);
|
||||
|
||||
const getTableInfo = (db: Database, tableName: string): TableInfoRow[] =>
|
||||
db.prepare(`PRAGMA table_info(${tableName})`).all() as TableInfoRow[];
|
||||
|
||||
const migrateLegacySessionNames = (db: Database): void => {
|
||||
const hasLegacySessionNamesTable = tableExists(db, 'session_names');
|
||||
const hasSessionsTable = tableExists(db, 'sessions');
|
||||
|
||||
if (!hasLegacySessionNamesTable) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (hasSessionsTable) {
|
||||
console.log('Running migration: Merging session_names into sessions');
|
||||
db.exec(`
|
||||
INSERT INTO sessions (session_id, provider, custom_name, created_at, updated_at)
|
||||
SELECT
|
||||
session_id,
|
||||
COALESCE(provider, 'claude'),
|
||||
custom_name,
|
||||
COALESCE(created_at, CURRENT_TIMESTAMP),
|
||||
COALESCE(updated_at, CURRENT_TIMESTAMP)
|
||||
FROM session_names
|
||||
WHERE true
|
||||
ON CONFLICT(session_id) DO UPDATE SET
|
||||
provider = excluded.provider,
|
||||
custom_name = COALESCE(excluded.custom_name, sessions.custom_name),
|
||||
created_at = COALESCE(sessions.created_at, excluded.created_at),
|
||||
updated_at = COALESCE(excluded.updated_at, sessions.updated_at)
|
||||
`);
|
||||
db.exec('DROP TABLE session_names');
|
||||
return;
|
||||
}
|
||||
|
||||
console.log('Running migration: Renaming session_names table to sessions');
|
||||
db.exec('ALTER TABLE session_names RENAME TO sessions');
|
||||
};
|
||||
|
||||
const migrateLegacyWorkspaceTableIntoProjects = (db: Database): void => {
|
||||
db.exec(PROJECTS_TABLE_SCHEMA_SQL);
|
||||
|
||||
if (!tableExists(db, 'workspace_original_paths')) {
|
||||
return;
|
||||
}
|
||||
|
||||
console.log('Running migration: Migrating workspace_original_paths data into projects');
|
||||
db.exec(`
|
||||
INSERT INTO projects (project_id, project_path, custom_project_name, isStarred, isArchived)
|
||||
SELECT
|
||||
CASE
|
||||
WHEN workspace_id IS NULL OR trim(workspace_id) = ''
|
||||
THEN ${SQLITE_UUID_SQL}
|
||||
ELSE workspace_id
|
||||
END,
|
||||
workspace_path,
|
||||
custom_workspace_name,
|
||||
COALESCE(isStarred, 0),
|
||||
0
|
||||
FROM workspace_original_paths
|
||||
WHERE workspace_path IS NOT NULL AND trim(workspace_path) <> ''
|
||||
ON CONFLICT(project_path) DO UPDATE SET
|
||||
custom_project_name = COALESCE(projects.custom_project_name, excluded.custom_project_name),
|
||||
isStarred = COALESCE(projects.isStarred, excluded.isStarred)
|
||||
`);
|
||||
};
|
||||
|
||||
const rebuildProjectsTableWithPrimaryKeySchema = (db: Database): void => {
|
||||
const hasProjectsTable = tableExists(db, 'projects');
|
||||
if (!hasProjectsTable) {
|
||||
db.exec(PROJECTS_TABLE_SCHEMA_SQL);
|
||||
return;
|
||||
}
|
||||
|
||||
const projectsTableInfo = getTableInfo(db, 'projects');
|
||||
const columnNames = projectsTableInfo.map((column) => column.name);
|
||||
const hasProjectIdPrimaryKey = projectsTableInfo.some(
|
||||
(column) => column.name === 'project_id' && column.pk === 1,
|
||||
);
|
||||
|
||||
if (hasProjectIdPrimaryKey) {
|
||||
addColumnToTableIfNotExists(db, 'projects', columnNames, 'custom_project_name', 'TEXT DEFAULT NULL');
|
||||
addColumnToTableIfNotExists(db, 'projects', columnNames, 'isStarred', 'BOOLEAN DEFAULT 0');
|
||||
addColumnToTableIfNotExists(db, 'projects', columnNames, 'isArchived', 'BOOLEAN DEFAULT 0');
|
||||
db.exec(`
|
||||
UPDATE projects
|
||||
SET project_id = ${SQLITE_UUID_SQL}
|
||||
WHERE project_id IS NULL OR trim(project_id) = ''
|
||||
`);
|
||||
return;
|
||||
}
|
||||
|
||||
console.log('Running migration: Rebuilding projects table to enforce project_id primary key');
|
||||
|
||||
const projectPathExpression = columnNames.includes('project_path')
|
||||
? 'project_path'
|
||||
: columnNames.includes('workspace_path')
|
||||
? 'workspace_path'
|
||||
: 'NULL';
|
||||
|
||||
const customProjectNameExpression = columnNames.includes('custom_project_name')
|
||||
? 'custom_project_name'
|
||||
: columnNames.includes('custom_workspace_name')
|
||||
? 'custom_workspace_name'
|
||||
: 'NULL';
|
||||
|
||||
const isStarredExpression = columnNames.includes('isStarred') ? 'COALESCE(isStarred, 0)' : '0';
|
||||
|
||||
const isArchivedExpression = columnNames.includes('isArchived') ? 'COALESCE(isArchived, 0)' : '0';
|
||||
|
||||
const projectIdExpression = columnNames.includes('project_id')
|
||||
? `CASE
|
||||
WHEN project_id IS NULL OR trim(project_id) = ''
|
||||
THEN ${SQLITE_UUID_SQL}
|
||||
ELSE project_id
|
||||
END`
|
||||
: SQLITE_UUID_SQL;
|
||||
|
||||
db.exec('PRAGMA foreign_keys = OFF');
|
||||
try {
|
||||
db.exec('BEGIN TRANSACTION');
|
||||
db.exec('DROP TABLE IF EXISTS projects__new');
|
||||
db.exec(`
|
||||
CREATE TABLE projects__new (
|
||||
project_id TEXT PRIMARY KEY NOT NULL,
|
||||
project_path TEXT NOT NULL UNIQUE,
|
||||
custom_project_name TEXT DEFAULT NULL,
|
||||
isStarred BOOLEAN DEFAULT 0,
|
||||
isArchived BOOLEAN DEFAULT 0
|
||||
)
|
||||
`);
|
||||
db.exec(`
|
||||
WITH source_rows AS (
|
||||
SELECT
|
||||
${projectPathExpression} AS project_path,
|
||||
${customProjectNameExpression} AS custom_project_name,
|
||||
${isStarredExpression} AS isStarred,
|
||||
${isArchivedExpression} AS isArchived,
|
||||
${projectIdExpression} AS candidate_project_id,
|
||||
rowid AS source_rowid
|
||||
FROM projects
|
||||
WHERE ${projectPathExpression} IS NOT NULL AND trim(${projectPathExpression}) <> ''
|
||||
),
|
||||
deduped_paths AS (
|
||||
SELECT
|
||||
project_path,
|
||||
custom_project_name,
|
||||
isStarred,
|
||||
isArchived,
|
||||
candidate_project_id,
|
||||
source_rowid,
|
||||
ROW_NUMBER() OVER (PARTITION BY project_path ORDER BY source_rowid) AS project_path_rank
|
||||
FROM source_rows
|
||||
),
|
||||
prepared_rows AS (
|
||||
SELECT
|
||||
CASE
|
||||
WHEN ROW_NUMBER() OVER (PARTITION BY candidate_project_id ORDER BY source_rowid) = 1
|
||||
THEN candidate_project_id
|
||||
ELSE ${SQLITE_UUID_SQL}
|
||||
END AS project_id,
|
||||
project_path,
|
||||
custom_project_name,
|
||||
isStarred,
|
||||
isArchived
|
||||
FROM deduped_paths
|
||||
WHERE project_path_rank = 1
|
||||
)
|
||||
INSERT INTO projects__new (
|
||||
project_id,
|
||||
project_path,
|
||||
custom_project_name,
|
||||
isStarred,
|
||||
isArchived
|
||||
)
|
||||
SELECT
|
||||
project_id,
|
||||
project_path,
|
||||
custom_project_name,
|
||||
isStarred,
|
||||
isArchived
|
||||
FROM prepared_rows
|
||||
`);
|
||||
db.exec('DROP TABLE projects');
|
||||
db.exec('ALTER TABLE projects__new RENAME TO projects');
|
||||
db.exec('COMMIT');
|
||||
} catch (migrationError) {
|
||||
db.exec('ROLLBACK');
|
||||
throw migrationError;
|
||||
} finally {
|
||||
db.exec('PRAGMA foreign_keys = ON');
|
||||
}
|
||||
};
|
||||
|
||||
const rebuildSessionsTableWithProjectSchema = (db: Database): void => {
|
||||
const hasSessions = tableExists(db, 'sessions');
|
||||
if (!hasSessions) {
|
||||
db.exec(SESSIONS_TABLE_SCHEMA_SQL);
|
||||
return;
|
||||
}
|
||||
|
||||
const sessionsTableInfo = getTableInfo(db, 'sessions');
|
||||
const columnNames = sessionsTableInfo.map((column) => column.name);
|
||||
const primaryKeyColumns = sessionsTableInfo
|
||||
.filter((column) => column.pk > 0)
|
||||
.sort((a, b) => a.pk - b.pk)
|
||||
.map((column) => column.name);
|
||||
|
||||
const shouldRebuild =
|
||||
!columnNames.includes('project_path') ||
|
||||
primaryKeyColumns.length !== 1 ||
|
||||
primaryKeyColumns[0] !== 'session_id' ||
|
||||
!columnNames.includes('provider');
|
||||
|
||||
if (!shouldRebuild) {
|
||||
addColumnToTableIfNotExists(db, 'sessions', columnNames, 'jsonl_path', 'TEXT');
|
||||
addColumnToTableIfNotExists(db, 'sessions', columnNames, 'created_at', 'DATETIME');
|
||||
addColumnToTableIfNotExists(db, 'sessions', columnNames, 'updated_at', 'DATETIME');
|
||||
db.exec('UPDATE sessions SET created_at = COALESCE(created_at, CURRENT_TIMESTAMP)');
|
||||
db.exec('UPDATE sessions SET updated_at = COALESCE(updated_at, CURRENT_TIMESTAMP)');
|
||||
return;
|
||||
}
|
||||
|
||||
console.log('Running migration: Rebuilding sessions table to project-based schema');
|
||||
|
||||
const projectPathExpression = columnNames.includes('project_path')
|
||||
? 'project_path'
|
||||
: columnNames.includes('workspace_path')
|
||||
? 'workspace_path'
|
||||
: 'NULL';
|
||||
|
||||
const providerExpression = columnNames.includes('provider')
|
||||
? "COALESCE(provider, 'claude')"
|
||||
: "'claude'";
|
||||
|
||||
const customNameExpression = columnNames.includes('custom_name')
|
||||
? 'custom_name'
|
||||
: 'NULL';
|
||||
|
||||
const jsonlPathExpression = columnNames.includes('jsonl_path')
|
||||
? 'jsonl_path'
|
||||
: 'NULL';
|
||||
|
||||
const createdAtExpression = columnNames.includes('created_at')
|
||||
? 'COALESCE(created_at, CURRENT_TIMESTAMP)'
|
||||
: 'CURRENT_TIMESTAMP';
|
||||
|
||||
const updatedAtExpression = columnNames.includes('updated_at')
|
||||
? 'COALESCE(updated_at, CURRENT_TIMESTAMP)'
|
||||
: 'CURRENT_TIMESTAMP';
|
||||
|
||||
db.exec('PRAGMA foreign_keys = OFF');
|
||||
try {
|
||||
db.exec('BEGIN TRANSACTION');
|
||||
db.exec('DROP TABLE IF EXISTS sessions__new');
|
||||
db.exec(`
|
||||
CREATE TABLE sessions__new (
|
||||
session_id TEXT NOT NULL,
|
||||
provider TEXT NOT NULL DEFAULT 'claude',
|
||||
custom_name TEXT,
|
||||
project_path TEXT,
|
||||
jsonl_path TEXT,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
PRIMARY KEY (session_id),
|
||||
FOREIGN KEY (project_path) REFERENCES projects(project_path)
|
||||
ON DELETE SET NULL
|
||||
ON UPDATE CASCADE
|
||||
)
|
||||
`);
|
||||
db.exec(`
|
||||
WITH source_rows AS (
|
||||
SELECT
|
||||
session_id,
|
||||
${providerExpression} AS provider,
|
||||
${customNameExpression} AS custom_name,
|
||||
${projectPathExpression} AS project_path,
|
||||
${jsonlPathExpression} AS jsonl_path,
|
||||
${createdAtExpression} AS created_at,
|
||||
${updatedAtExpression} AS updated_at,
|
||||
rowid AS source_rowid
|
||||
FROM sessions
|
||||
WHERE session_id IS NOT NULL AND trim(session_id) <> ''
|
||||
),
|
||||
ranked_rows AS (
|
||||
SELECT
|
||||
session_id,
|
||||
provider,
|
||||
custom_name,
|
||||
project_path,
|
||||
jsonl_path,
|
||||
created_at,
|
||||
updated_at,
|
||||
ROW_NUMBER() OVER (
|
||||
PARTITION BY session_id
|
||||
ORDER BY datetime(COALESCE(updated_at, created_at)) DESC, source_rowid DESC
|
||||
) AS session_rank
|
||||
FROM source_rows
|
||||
)
|
||||
INSERT INTO sessions__new (
|
||||
session_id,
|
||||
provider,
|
||||
custom_name,
|
||||
project_path,
|
||||
jsonl_path,
|
||||
created_at,
|
||||
updated_at
|
||||
)
|
||||
SELECT
|
||||
session_id,
|
||||
provider,
|
||||
custom_name,
|
||||
project_path,
|
||||
jsonl_path,
|
||||
created_at,
|
||||
updated_at
|
||||
FROM ranked_rows
|
||||
WHERE session_rank = 1
|
||||
`);
|
||||
db.exec('DROP TABLE sessions');
|
||||
db.exec('ALTER TABLE sessions__new RENAME TO sessions');
|
||||
db.exec('COMMIT');
|
||||
} catch (migrationError) {
|
||||
db.exec('ROLLBACK');
|
||||
throw migrationError;
|
||||
} finally {
|
||||
db.exec('PRAGMA foreign_keys = ON');
|
||||
}
|
||||
};
|
||||
|
||||
const ensureProjectsForSessionPaths = (db: Database): void => {
|
||||
if (!tableExists(db, 'sessions')) {
|
||||
return;
|
||||
}
|
||||
|
||||
db.exec(`
|
||||
INSERT INTO projects (project_id, project_path, custom_project_name, isStarred, isArchived)
|
||||
SELECT
|
||||
${SQLITE_UUID_SQL},
|
||||
project_path,
|
||||
NULL,
|
||||
0,
|
||||
0
|
||||
FROM sessions
|
||||
WHERE project_path IS NOT NULL AND trim(project_path) <> ''
|
||||
ON CONFLICT(project_path) DO NOTHING
|
||||
`);
|
||||
};
|
||||
|
||||
export const runMigrations = (db: Database) => {
|
||||
try {
|
||||
const usersTableInfo = db.prepare('PRAGMA table_info(users)').all() as { name: string }[];
|
||||
const userColumnNames = usersTableInfo.map((column) => column.name);
|
||||
|
||||
addColumnToTableIfNotExists(db, 'users', userColumnNames, 'git_name', 'TEXT');
|
||||
addColumnToTableIfNotExists(db, 'users', userColumnNames, 'git_email', 'TEXT');
|
||||
addColumnToTableIfNotExists(
|
||||
db,
|
||||
'users',
|
||||
userColumnNames,
|
||||
'has_completed_onboarding',
|
||||
'BOOLEAN DEFAULT 0'
|
||||
);
|
||||
|
||||
db.exec(APP_CONFIG_TABLE_SCHEMA_SQL);
|
||||
db.exec(USER_NOTIFICATION_PREFERENCES_TABLE_SCHEMA_SQL);
|
||||
db.exec(VAPID_KEYS_TABLE_SCHEMA_SQL);
|
||||
db.exec(PUSH_SUBSCRIPTIONS_TABLE_SCHEMA_SQL);
|
||||
db.exec('CREATE INDEX IF NOT EXISTS idx_push_subscriptions_user_id ON push_subscriptions(user_id)');
|
||||
|
||||
db.exec(PROJECTS_TABLE_SCHEMA_SQL);
|
||||
rebuildProjectsTableWithPrimaryKeySchema(db);
|
||||
|
||||
migrateLegacyWorkspaceTableIntoProjects(db);
|
||||
rebuildSessionsTableWithProjectSchema(db);
|
||||
migrateLegacySessionNames(db);
|
||||
ensureProjectsForSessionPaths(db);
|
||||
|
||||
db.exec('CREATE INDEX IF NOT EXISTS idx_session_ids_lookup ON sessions(session_id)');
|
||||
db.exec('CREATE INDEX IF NOT EXISTS idx_sessions_project_path ON sessions(project_path)');
|
||||
db.exec('CREATE INDEX IF NOT EXISTS idx_projects_is_starred ON projects(isStarred)');
|
||||
db.exec('CREATE INDEX IF NOT EXISTS idx_projects_is_archived ON projects(isArchived)');
|
||||
|
||||
db.exec('DROP INDEX IF EXISTS idx_session_names_lookup');
|
||||
db.exec('DROP INDEX IF EXISTS idx_sessions_workspace_path');
|
||||
db.exec('DROP INDEX IF EXISTS idx_workspace_original_paths_is_starred');
|
||||
db.exec('DROP INDEX IF EXISTS idx_workspace_original_paths_workspace_id');
|
||||
|
||||
if (tableExists(db, 'workspace_original_paths')) {
|
||||
console.log('Running migration: Dropping legacy workspace_original_paths table');
|
||||
db.exec('DROP TABLE workspace_original_paths');
|
||||
}
|
||||
|
||||
db.exec(LAST_SCANNED_AT_SQL);
|
||||
console.log('Database migrations completed successfully');
|
||||
} catch (error: any) {
|
||||
console.error('Error running migrations:', error.message);
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
119
server/modules/database/repositories/api-keys.ts
Normal file
119
server/modules/database/repositories/api-keys.ts
Normal file
@@ -0,0 +1,119 @@
|
||||
/**
|
||||
* API keys repository.
|
||||
*
|
||||
* Manages API keys used for external/programmatic access to the backend.
|
||||
* Keys are prefixed with `ck_` and tied to a user via foreign key.
|
||||
*/
|
||||
|
||||
import crypto from 'crypto';
|
||||
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
|
||||
type ApiKeyRow = {
|
||||
id: number;
|
||||
key_name: string;
|
||||
api_key: string;
|
||||
created_at: string;
|
||||
last_used: string | null;
|
||||
is_active: number;
|
||||
};
|
||||
|
||||
type CreateApiKeyResult = {
|
||||
id: number | bigint;
|
||||
keyName: string;
|
||||
apiKey: string;
|
||||
};
|
||||
|
||||
type ValidatedApiKeyUser = {
|
||||
id: number;
|
||||
username: string;
|
||||
api_key_id: number;
|
||||
};
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Generates a cryptographically random API key with the `ck_` prefix. */
|
||||
function generateApiKey(): string {
|
||||
return 'ck_' + crypto.randomBytes(32).toString('hex');
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Queries
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const apiKeysDb = {
|
||||
generateApiKey,
|
||||
|
||||
/** Creates a new API key for the given user and returns it for one-time display. */
|
||||
createApiKey(userId: number, keyName: string): CreateApiKeyResult {
|
||||
const db = getConnection();
|
||||
const apiKey = generateApiKey();
|
||||
const result = db
|
||||
.prepare(
|
||||
'INSERT INTO api_keys (user_id, key_name, api_key) VALUES (?, ?, ?)'
|
||||
)
|
||||
.run(userId, keyName, apiKey);
|
||||
return { id: result.lastInsertRowid, keyName, apiKey };
|
||||
},
|
||||
|
||||
/** Lists all API keys for a user, most recent first. */
|
||||
getApiKeys(userId: number): ApiKeyRow[] {
|
||||
const db = getConnection();
|
||||
return db
|
||||
.prepare(
|
||||
'SELECT id, key_name, api_key, created_at, last_used, is_active FROM api_keys WHERE user_id = ? ORDER BY created_at DESC'
|
||||
)
|
||||
.all(userId) as ApiKeyRow[];
|
||||
},
|
||||
|
||||
/**
|
||||
* Validates an API key and resolves the owning user.
|
||||
* If the key is valid, its `last_used` timestamp is updated as a side effect.
|
||||
* Returns undefined when the key is invalid or the user is inactive.
|
||||
*/
|
||||
validateApiKey(apiKey: string): ValidatedApiKeyUser | undefined {
|
||||
const db = getConnection();
|
||||
const row = db
|
||||
.prepare(
|
||||
`SELECT u.id, u.username, ak.id as api_key_id
|
||||
FROM api_keys ak
|
||||
JOIN users u ON ak.user_id = u.id
|
||||
WHERE ak.api_key = ? AND ak.is_active = 1 AND u.is_active = 1`
|
||||
)
|
||||
.get(apiKey) as ValidatedApiKeyUser | undefined;
|
||||
|
||||
if (row) {
|
||||
db.prepare(
|
||||
'UPDATE api_keys SET last_used = CURRENT_TIMESTAMP WHERE id = ?'
|
||||
).run(row.api_key_id);
|
||||
}
|
||||
|
||||
return row;
|
||||
},
|
||||
|
||||
/** Permanently removes an API key. Returns true if a row was deleted. */
|
||||
deleteApiKey(userId: number, apiKeyId: number): boolean {
|
||||
const db = getConnection();
|
||||
const result = db
|
||||
.prepare('DELETE FROM api_keys WHERE id = ? AND user_id = ?')
|
||||
.run(apiKeyId, userId);
|
||||
return result.changes > 0;
|
||||
},
|
||||
|
||||
/** Enables or disables an API key without deleting it. */
|
||||
toggleApiKey(
|
||||
userId: number,
|
||||
apiKeyId: number,
|
||||
isActive: boolean
|
||||
): boolean {
|
||||
const db = getConnection();
|
||||
const result = db
|
||||
.prepare(
|
||||
'UPDATE api_keys SET is_active = ? WHERE id = ? AND user_id = ?'
|
||||
)
|
||||
.run(isActive ? 1 : 0, apiKeyId, userId);
|
||||
return result.changes > 0;
|
||||
},
|
||||
};
|
||||
53
server/modules/database/repositories/app-config.ts
Normal file
53
server/modules/database/repositories/app-config.ts
Normal file
@@ -0,0 +1,53 @@
|
||||
/**
|
||||
* App config repository.
|
||||
*
|
||||
* Key-value store for application-level configuration that persists
|
||||
* across restarts (JWT secret, feature flags, etc.). Values are always
|
||||
* stored as strings; callers handle parsing.
|
||||
*/
|
||||
|
||||
import crypto from 'crypto';
|
||||
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Queries
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const appConfigDb = {
|
||||
/** Returns the stored value for a config key, or null if missing. */
|
||||
get(key: string): string | null {
|
||||
try {
|
||||
const db = getConnection();
|
||||
const row = db
|
||||
.prepare('SELECT value FROM app_config WHERE key = ?')
|
||||
.get(key) as { value: string } | undefined;
|
||||
return row?.value ?? null;
|
||||
} catch {
|
||||
// Swallow errors so early-startup reads (e.g. JWT secret) do not crash.
|
||||
return null;
|
||||
}
|
||||
},
|
||||
|
||||
/** Inserts or updates a config key (upsert). */
|
||||
set(key: string, value: string): void {
|
||||
const db = getConnection();
|
||||
db.prepare(
|
||||
'INSERT INTO app_config (key, value) VALUES (?, ?) ON CONFLICT(key) DO UPDATE SET value = excluded.value'
|
||||
).run(key, value);
|
||||
},
|
||||
|
||||
/**
|
||||
* Returns the JWT signing secret, generating and persisting one
|
||||
* if it does not already exist. This ensures the secret survives
|
||||
* server restarts while being created automatically on first boot.
|
||||
*/
|
||||
getOrCreateJwtSecret(): string {
|
||||
let secret = appConfigDb.get('jwt_secret');
|
||||
if (!secret) {
|
||||
secret = crypto.randomBytes(64).toString('hex');
|
||||
appConfigDb.set('jwt_secret', secret);
|
||||
}
|
||||
return secret;
|
||||
},
|
||||
};
|
||||
106
server/modules/database/repositories/credentials.ts
Normal file
106
server/modules/database/repositories/credentials.ts
Normal file
@@ -0,0 +1,106 @@
|
||||
/**
|
||||
* User credentials repository.
|
||||
*
|
||||
* Manages external service tokens (GitHub, GitLab, Bitbucket, etc.)
|
||||
* stored per-user. Each credential has a type discriminator so multiple
|
||||
* credential kinds can coexist in the same table.
|
||||
*/
|
||||
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
import type {
|
||||
CreateCredentialResult,
|
||||
CredentialPublicRow,
|
||||
} from '@/shared/types.js';
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Queries
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const credentialsDb = {
|
||||
/** Stores a new credential and returns a safe (no raw value) result. */
|
||||
createCredential(
|
||||
userId: number,
|
||||
credentialName: string,
|
||||
credentialType: string,
|
||||
credentialValue: string,
|
||||
description: string | null = null
|
||||
): CreateCredentialResult {
|
||||
const db = getConnection();
|
||||
const result = db
|
||||
.prepare(
|
||||
'INSERT INTO user_credentials (user_id, credential_name, credential_type, credential_value, description) VALUES (?, ?, ?, ?, ?)'
|
||||
)
|
||||
.run(userId, credentialName, credentialType, credentialValue, description);
|
||||
return {
|
||||
id: result.lastInsertRowid,
|
||||
credentialName,
|
||||
credentialType,
|
||||
};
|
||||
},
|
||||
|
||||
/**
|
||||
* Lists credentials for a user (excluding raw values).
|
||||
* Optionally filters by credential type (e.g. 'github_token').
|
||||
*/
|
||||
getCredentials(
|
||||
userId: number,
|
||||
credentialType: string | null = null
|
||||
): CredentialPublicRow[] {
|
||||
const db = getConnection();
|
||||
|
||||
if (credentialType) {
|
||||
return db
|
||||
.prepare(
|
||||
'SELECT id, credential_name, credential_type, description, created_at, is_active FROM user_credentials WHERE user_id = ? AND credential_type = ? ORDER BY created_at DESC'
|
||||
)
|
||||
.all(userId, credentialType) as CredentialPublicRow[];
|
||||
}
|
||||
|
||||
return db
|
||||
.prepare(
|
||||
'SELECT id, credential_name, credential_type, description, created_at, is_active FROM user_credentials WHERE user_id = ? ORDER BY created_at DESC'
|
||||
)
|
||||
.all(userId) as CredentialPublicRow[];
|
||||
},
|
||||
|
||||
/**
|
||||
* Returns the raw credential value for the most recent active
|
||||
* credential of the given type, or null if none exists.
|
||||
*/
|
||||
getActiveCredential(
|
||||
userId: number,
|
||||
credentialType: string
|
||||
): string | null {
|
||||
const db = getConnection();
|
||||
const row = db
|
||||
.prepare(
|
||||
'SELECT credential_value FROM user_credentials WHERE user_id = ? AND credential_type = ? AND is_active = 1 ORDER BY created_at DESC LIMIT 1'
|
||||
)
|
||||
.get(userId, credentialType) as { credential_value: string } | undefined;
|
||||
return row?.credential_value ?? null;
|
||||
},
|
||||
|
||||
/** Permanently removes a credential. Returns true if a row was deleted. */
|
||||
deleteCredential(userId: number, credentialId: number): boolean {
|
||||
const db = getConnection();
|
||||
const result = db
|
||||
.prepare('DELETE FROM user_credentials WHERE id = ? AND user_id = ?')
|
||||
.run(credentialId, userId);
|
||||
return result.changes > 0;
|
||||
},
|
||||
|
||||
/** Enables or disables a credential without deleting it. */
|
||||
toggleCredential(
|
||||
userId: number,
|
||||
credentialId: number,
|
||||
isActive: boolean
|
||||
): boolean {
|
||||
const db = getConnection();
|
||||
const result = db
|
||||
.prepare(
|
||||
'UPDATE user_credentials SET is_active = ? WHERE id = ? AND user_id = ?'
|
||||
)
|
||||
.run(isActive ? 1 : 0, credentialId, userId);
|
||||
return result.changes > 0;
|
||||
},
|
||||
};
|
||||
100
server/modules/database/repositories/github-tokens.ts
Normal file
100
server/modules/database/repositories/github-tokens.ts
Normal file
@@ -0,0 +1,100 @@
|
||||
/**
|
||||
* GitHub tokens repository.
|
||||
*
|
||||
* Backward-compatible helper layer over generic credentials storage.
|
||||
* Tokens are stored in `user_credentials` with `credential_type = 'github_token'`.
|
||||
*/
|
||||
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
import { credentialsDb } from '@/modules/database/repositories/credentials.js';
|
||||
import type {
|
||||
CredentialPublicRow,
|
||||
CreateCredentialResult,
|
||||
} from '@/shared/types.js';
|
||||
|
||||
const GITHUB_TOKEN_TYPE = 'github_token';
|
||||
|
||||
type CredentialRow = {
|
||||
id: number;
|
||||
user_id: number;
|
||||
credential_name: string;
|
||||
credential_type: string;
|
||||
credential_value: string;
|
||||
description: string | null;
|
||||
created_at: string;
|
||||
is_active: number;
|
||||
};
|
||||
|
||||
type GithubTokenLookup = CredentialRow & {
|
||||
github_token: string;
|
||||
};
|
||||
|
||||
export const githubTokensDb = {
|
||||
/** Creates a GitHub token credential entry. */
|
||||
createGithubToken(
|
||||
userId: number,
|
||||
tokenName: string,
|
||||
githubToken: string,
|
||||
description: string | null = null
|
||||
): CreateCredentialResult {
|
||||
return credentialsDb.createCredential(
|
||||
userId,
|
||||
tokenName,
|
||||
GITHUB_TOKEN_TYPE,
|
||||
githubToken,
|
||||
description
|
||||
);
|
||||
},
|
||||
|
||||
/** Returns all GitHub tokens (safe shape: no credential value). */
|
||||
getGithubTokens(userId: number): CredentialPublicRow[] {
|
||||
return credentialsDb.getCredentials(userId, GITHUB_TOKEN_TYPE);
|
||||
},
|
||||
|
||||
/** Returns the most recent active GitHub token value for a user. */
|
||||
getActiveGithubToken(userId: number): string | null {
|
||||
return credentialsDb.getActiveCredential(userId, GITHUB_TOKEN_TYPE);
|
||||
},
|
||||
|
||||
/**
|
||||
* Returns a specific active GitHub token row by id/user, including
|
||||
* a `github_token` compatibility field.
|
||||
*/
|
||||
getGithubTokenById(userId: number, tokenId: number): GithubTokenLookup | null {
|
||||
const db = getConnection();
|
||||
const row = db
|
||||
.prepare(
|
||||
`SELECT *
|
||||
FROM user_credentials
|
||||
WHERE id = ? AND user_id = ? AND credential_type = ? AND is_active = 1`
|
||||
)
|
||||
.get(tokenId, userId, GITHUB_TOKEN_TYPE) as CredentialRow | undefined;
|
||||
|
||||
if (!row) return null;
|
||||
|
||||
return {
|
||||
...row,
|
||||
github_token: row.credential_value,
|
||||
};
|
||||
},
|
||||
|
||||
/** Updates active state for a GitHub token. */
|
||||
updateGithubToken(
|
||||
userId: number,
|
||||
tokenId: number,
|
||||
isActive: boolean
|
||||
): boolean {
|
||||
return credentialsDb.toggleCredential(userId, tokenId, isActive);
|
||||
},
|
||||
|
||||
/** Deletes a GitHub token. */
|
||||
deleteGithubToken(userId: number, tokenId: number): boolean {
|
||||
return credentialsDb.deleteCredential(userId, tokenId);
|
||||
},
|
||||
|
||||
// Legacy alias used by existing routes
|
||||
toggleGithubToken(userId: number, tokenId: number, isActive: boolean): boolean {
|
||||
return githubTokensDb.updateGithubToken(userId, tokenId, isActive);
|
||||
},
|
||||
};
|
||||
|
||||
103
server/modules/database/repositories/notification-preferences.ts
Normal file
103
server/modules/database/repositories/notification-preferences.ts
Normal file
@@ -0,0 +1,103 @@
|
||||
/**
|
||||
* Notification preferences repository.
|
||||
*
|
||||
* Stores per-user notification channel/event preferences as JSON.
|
||||
*/
|
||||
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
|
||||
type NotificationPreferences = {
|
||||
channels: {
|
||||
inApp: boolean;
|
||||
webPush: boolean;
|
||||
};
|
||||
events: {
|
||||
actionRequired: boolean;
|
||||
stop: boolean;
|
||||
error: boolean;
|
||||
};
|
||||
};
|
||||
|
||||
const DEFAULT_NOTIFICATION_PREFERENCES: NotificationPreferences = {
|
||||
channels: {
|
||||
inApp: false,
|
||||
webPush: false,
|
||||
},
|
||||
events: {
|
||||
actionRequired: true,
|
||||
stop: true,
|
||||
error: true,
|
||||
},
|
||||
};
|
||||
|
||||
function normalizeNotificationPreferences(value: unknown): NotificationPreferences {
|
||||
const source = value && typeof value === 'object' ? (value as Record<string, any>) : {};
|
||||
|
||||
return {
|
||||
channels: {
|
||||
inApp: source.channels?.inApp === true,
|
||||
webPush: source.channels?.webPush === true,
|
||||
},
|
||||
events: {
|
||||
actionRequired: source.events?.actionRequired !== false,
|
||||
stop: source.events?.stop !== false,
|
||||
error: source.events?.error !== false,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export const notificationPreferencesDb = {
|
||||
/** Returns the normalized preferences for a user, creating defaults on first read. */
|
||||
getNotificationPreferences(userId: number): NotificationPreferences {
|
||||
const db = getConnection();
|
||||
const row = db
|
||||
.prepare(
|
||||
'SELECT preferences_json FROM user_notification_preferences WHERE user_id = ?'
|
||||
)
|
||||
.get(userId) as { preferences_json: string } | undefined;
|
||||
|
||||
if (!row) {
|
||||
const defaults = normalizeNotificationPreferences(DEFAULT_NOTIFICATION_PREFERENCES);
|
||||
db.prepare(
|
||||
'INSERT INTO user_notification_preferences (user_id, preferences_json, updated_at) VALUES (?, ?, CURRENT_TIMESTAMP)'
|
||||
).run(userId, JSON.stringify(defaults));
|
||||
return defaults;
|
||||
}
|
||||
|
||||
let parsed: unknown;
|
||||
try {
|
||||
parsed = JSON.parse(row.preferences_json);
|
||||
} catch {
|
||||
parsed = DEFAULT_NOTIFICATION_PREFERENCES;
|
||||
}
|
||||
return normalizeNotificationPreferences(parsed);
|
||||
},
|
||||
|
||||
/** Upserts normalized preferences for a user and returns the stored value. */
|
||||
updateNotificationPreferences(
|
||||
userId: number,
|
||||
preferences: unknown
|
||||
): NotificationPreferences {
|
||||
const normalized = normalizeNotificationPreferences(preferences);
|
||||
const db = getConnection();
|
||||
|
||||
db.prepare(
|
||||
`INSERT INTO user_notification_preferences (user_id, preferences_json, updated_at)
|
||||
VALUES (?, ?, CURRENT_TIMESTAMP)
|
||||
ON CONFLICT(user_id) DO UPDATE SET
|
||||
preferences_json = excluded.preferences_json,
|
||||
updated_at = CURRENT_TIMESTAMP`
|
||||
).run(userId, JSON.stringify(normalized));
|
||||
|
||||
return normalized;
|
||||
},
|
||||
|
||||
// Legacy aliases used by existing services/routes
|
||||
getPreferences(userId: number): NotificationPreferences {
|
||||
return notificationPreferencesDb.getNotificationPreferences(userId);
|
||||
},
|
||||
updatePreferences(userId: number, preferences: unknown): NotificationPreferences {
|
||||
return notificationPreferencesDb.updateNotificationPreferences(userId, preferences);
|
||||
},
|
||||
};
|
||||
|
||||
@@ -0,0 +1,72 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import { mkdtemp, rm } from 'node:fs/promises';
|
||||
import { tmpdir } from 'node:os';
|
||||
import path from 'node:path';
|
||||
import test from 'node:test';
|
||||
|
||||
import { closeConnection } from '@/modules/database/connection.js';
|
||||
import { initializeDatabase } from '@/modules/database/init-db.js';
|
||||
import { projectsDb } from '@/modules/database/repositories/projects.db.js';
|
||||
|
||||
async function withIsolatedDatabase(runTest: () => void | Promise<void>): Promise<void> {
|
||||
const previousDatabasePath = process.env.DATABASE_PATH;
|
||||
const tempDirectory = await mkdtemp(path.join(tmpdir(), 'projects-db-'));
|
||||
const databasePath = path.join(tempDirectory, 'auth.db');
|
||||
|
||||
closeConnection();
|
||||
process.env.DATABASE_PATH = databasePath;
|
||||
await initializeDatabase();
|
||||
|
||||
try {
|
||||
await runTest();
|
||||
} finally {
|
||||
closeConnection();
|
||||
if (previousDatabasePath === undefined) {
|
||||
delete process.env.DATABASE_PATH;
|
||||
} else {
|
||||
process.env.DATABASE_PATH = previousDatabasePath;
|
||||
}
|
||||
await rm(tempDirectory, { recursive: true, force: true });
|
||||
}
|
||||
}
|
||||
|
||||
test('projectsDb.createProjectPath returns created for fresh paths', async () => {
|
||||
await withIsolatedDatabase(() => {
|
||||
const created = projectsDb.createProjectPath('/workspace/new-project');
|
||||
|
||||
assert.equal(created.outcome, 'created');
|
||||
assert.ok(created.project);
|
||||
assert.equal(created.project?.project_path, '/workspace/new-project');
|
||||
assert.equal(created.project?.isArchived, 0);
|
||||
});
|
||||
});
|
||||
|
||||
test('projectsDb.createProjectPath returns reactivated_archived for archived duplicates', async () => {
|
||||
await withIsolatedDatabase(() => {
|
||||
const initial = projectsDb.createProjectPath('/workspace/archived-project', 'Archived Project');
|
||||
assert.equal(initial.outcome, 'created');
|
||||
assert.ok(initial.project);
|
||||
|
||||
projectsDb.updateProjectIsArchived('/workspace/archived-project', true);
|
||||
|
||||
const reused = projectsDb.createProjectPath('/workspace/archived-project', 'Renamed Project');
|
||||
assert.equal(reused.outcome, 'reactivated_archived');
|
||||
assert.ok(reused.project);
|
||||
assert.equal(reused.project?.project_id, initial.project?.project_id);
|
||||
assert.equal(reused.project?.isArchived, 0);
|
||||
});
|
||||
});
|
||||
|
||||
test('projectsDb.createProjectPath returns active_conflict for active duplicates', async () => {
|
||||
await withIsolatedDatabase(() => {
|
||||
const initial = projectsDb.createProjectPath('/workspace/active-project');
|
||||
assert.equal(initial.outcome, 'created');
|
||||
assert.ok(initial.project);
|
||||
|
||||
const conflict = projectsDb.createProjectPath('/workspace/active-project');
|
||||
assert.equal(conflict.outcome, 'active_conflict');
|
||||
assert.ok(conflict.project);
|
||||
assert.equal(conflict.project?.project_id, initial.project?.project_id);
|
||||
assert.equal(conflict.project?.isArchived, 0);
|
||||
});
|
||||
});
|
||||
183
server/modules/database/repositories/projects.db.ts
Normal file
183
server/modules/database/repositories/projects.db.ts
Normal file
@@ -0,0 +1,183 @@
|
||||
import { randomUUID } from 'node:crypto';
|
||||
import path from 'node:path';
|
||||
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
import type { CreateProjectPathResult, ProjectRepositoryRow } from '@/shared/types.js';
|
||||
import { normalizeProjectPath } from '@/shared/utils.js';
|
||||
|
||||
function normalizeProjectDisplayName(projectPath: string, customProjectName: string | null): string {
|
||||
const trimmedCustomName = typeof customProjectName === 'string' ? customProjectName.trim() : '';
|
||||
if (trimmedCustomName.length > 0) {
|
||||
return trimmedCustomName;
|
||||
}
|
||||
|
||||
const directoryName = path.basename(projectPath);
|
||||
return directoryName || projectPath;
|
||||
}
|
||||
|
||||
export const projectsDb = {
|
||||
createProjectPath(projectPath: string, customProjectName: string | null = null): CreateProjectPathResult {
|
||||
const db = getConnection();
|
||||
const normalizedProjectPath = normalizeProjectPath(projectPath);
|
||||
const normalizedProjectName = normalizeProjectDisplayName(normalizedProjectPath, customProjectName);
|
||||
const attemptedId = randomUUID();
|
||||
const row = db.prepare(`
|
||||
INSERT INTO projects (project_id, project_path, custom_project_name, isArchived)
|
||||
VALUES (?, ?, ?, 0)
|
||||
ON CONFLICT(project_path) DO UPDATE SET
|
||||
isArchived = 0
|
||||
WHERE projects.isArchived = 1
|
||||
RETURNING project_id, project_path, custom_project_name, isStarred, isArchived
|
||||
`).get(attemptedId, normalizedProjectPath, normalizedProjectName) as ProjectRepositoryRow | undefined;
|
||||
|
||||
if (row) {
|
||||
return {
|
||||
outcome: row.project_id === attemptedId ? 'created' : 'reactivated_archived',
|
||||
project: row,
|
||||
};
|
||||
}
|
||||
|
||||
const existingProject = projectsDb.getProjectPath(normalizedProjectPath);
|
||||
return {
|
||||
outcome: 'active_conflict',
|
||||
project: existingProject,
|
||||
};
|
||||
},
|
||||
|
||||
getProjectPath(projectPath: string): ProjectRepositoryRow | null {
|
||||
const db = getConnection();
|
||||
const normalizedProjectPath = normalizeProjectPath(projectPath);
|
||||
const row = db.prepare(`
|
||||
SELECT project_id, project_path, custom_project_name, isStarred, isArchived
|
||||
FROM projects
|
||||
WHERE project_path = ?
|
||||
`).get(normalizedProjectPath) as ProjectRepositoryRow | undefined;
|
||||
|
||||
return row ?? null;
|
||||
},
|
||||
|
||||
getProjectById(projectId: string): ProjectRepositoryRow | null {
|
||||
const db = getConnection();
|
||||
const row = db.prepare(`
|
||||
SELECT project_id, project_path, custom_project_name, isStarred, isArchived
|
||||
FROM projects
|
||||
WHERE project_id = ?
|
||||
`).get(projectId) as ProjectRepositoryRow | undefined;
|
||||
|
||||
return row ?? null;
|
||||
},
|
||||
|
||||
/**
|
||||
* Resolve the absolute project directory from a database project_id.
|
||||
*
|
||||
* This is the canonical lookup used after the projectName → projectId migration:
|
||||
* API routes receive the DB-assigned `projectId` and must resolve the real folder
|
||||
* path through this helper before touching the filesystem. Returns `null` when the
|
||||
* project row does not exist so callers can respond with a 404.
|
||||
*/
|
||||
getProjectPathById(projectId: string): string | null {
|
||||
const db = getConnection();
|
||||
const row = db.prepare(`
|
||||
SELECT project_path
|
||||
FROM projects
|
||||
WHERE project_id = ?
|
||||
`).get(projectId) as Pick<ProjectRepositoryRow, 'project_path'> | undefined;
|
||||
|
||||
return row?.project_path ?? null;
|
||||
},
|
||||
|
||||
getProjectPaths(): ProjectRepositoryRow[] {
|
||||
const db = getConnection();
|
||||
return db.prepare(`
|
||||
SELECT project_id, project_path, custom_project_name, isStarred, isArchived
|
||||
FROM projects
|
||||
WHERE isArchived = 0
|
||||
`).all() as ProjectRepositoryRow[];
|
||||
},
|
||||
|
||||
getCustomProjectName(projectPath: string): string | null {
|
||||
const db = getConnection();
|
||||
const normalizedProjectPath = normalizeProjectPath(projectPath);
|
||||
const row = db.prepare(`
|
||||
SELECT custom_project_name
|
||||
FROM projects
|
||||
WHERE project_path = ?
|
||||
`).get(normalizedProjectPath) as Pick<ProjectRepositoryRow, 'custom_project_name'> | undefined;
|
||||
|
||||
return row?.custom_project_name ?? null;
|
||||
},
|
||||
|
||||
updateCustomProjectName(projectPath: string, customProjectName: string | null): void {
|
||||
const db = getConnection();
|
||||
const normalizedProjectPath = normalizeProjectPath(projectPath);
|
||||
db.prepare(`
|
||||
INSERT INTO projects (project_id, project_path, custom_project_name)
|
||||
VALUES (?, ?, ?)
|
||||
ON CONFLICT(project_path) DO UPDATE SET custom_project_name = excluded.custom_project_name
|
||||
`).run(randomUUID(), normalizedProjectPath, customProjectName);
|
||||
},
|
||||
|
||||
updateCustomProjectNameById(projectId: string, customProjectName: string | null): void {
|
||||
const db = getConnection();
|
||||
db.prepare(`
|
||||
UPDATE projects
|
||||
SET custom_project_name = ?
|
||||
WHERE project_id = ?
|
||||
`).run(customProjectName, projectId);
|
||||
},
|
||||
|
||||
updateProjectIsStarred(projectPath: string, isStarred: boolean): void {
|
||||
const db = getConnection();
|
||||
const normalizedProjectPath = normalizeProjectPath(projectPath);
|
||||
db.prepare(`
|
||||
UPDATE projects
|
||||
SET isStarred = ?
|
||||
WHERE project_path = ?
|
||||
`).run(isStarred ? 1 : 0, normalizedProjectPath);
|
||||
},
|
||||
|
||||
updateProjectIsStarredById(projectId: string, isStarred: boolean): void {
|
||||
const db = getConnection();
|
||||
db.prepare(`
|
||||
UPDATE projects
|
||||
SET isStarred = ?
|
||||
WHERE project_id = ?
|
||||
`).run(isStarred ? 1 : 0, projectId);
|
||||
},
|
||||
|
||||
updateProjectIsArchived(projectPath: string, isArchived: boolean): void {
|
||||
const db = getConnection();
|
||||
const normalizedProjectPath = normalizeProjectPath(projectPath);
|
||||
db.prepare(`
|
||||
UPDATE projects
|
||||
SET isArchived = ?
|
||||
WHERE project_path = ?
|
||||
`).run(isArchived ? 1 : 0, normalizedProjectPath);
|
||||
},
|
||||
|
||||
updateProjectIsArchivedById(projectId: string, isArchived: boolean): void {
|
||||
const db = getConnection();
|
||||
db.prepare(`
|
||||
UPDATE projects
|
||||
SET isArchived = ?
|
||||
WHERE project_id = ?
|
||||
`).run(isArchived ? 1 : 0, projectId);
|
||||
},
|
||||
|
||||
deleteProjectPath(projectPath: string): void {
|
||||
const db = getConnection();
|
||||
const normalizedProjectPath = normalizeProjectPath(projectPath);
|
||||
db.prepare(`
|
||||
DELETE FROM projects
|
||||
WHERE project_path = ?
|
||||
`).run(normalizedProjectPath);
|
||||
},
|
||||
|
||||
deleteProjectById(projectId: string): void {
|
||||
const db = getConnection();
|
||||
db.prepare(`
|
||||
DELETE FROM projects
|
||||
WHERE project_id = ?
|
||||
`).run(projectId);
|
||||
},
|
||||
};
|
||||
80
server/modules/database/repositories/push-subscriptions.ts
Normal file
80
server/modules/database/repositories/push-subscriptions.ts
Normal file
@@ -0,0 +1,80 @@
|
||||
/**
|
||||
* Push subscriptions repository.
|
||||
*
|
||||
* Persists browser push subscription endpoints and keys per user.
|
||||
*/
|
||||
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
|
||||
type PushSubscriptionLookupRow = {
|
||||
endpoint: string;
|
||||
keys_p256dh: string;
|
||||
keys_auth: string;
|
||||
};
|
||||
|
||||
export const pushSubscriptionsDb = {
|
||||
/** Upserts a push subscription endpoint for a user. */
|
||||
createPushSubscription(
|
||||
userId: number,
|
||||
endpoint: string,
|
||||
keysP256dh: string,
|
||||
keysAuth: string
|
||||
): void {
|
||||
const db = getConnection();
|
||||
db.prepare(
|
||||
`INSERT INTO push_subscriptions (user_id, endpoint, keys_p256dh, keys_auth)
|
||||
VALUES (?, ?, ?, ?)
|
||||
ON CONFLICT(endpoint) DO UPDATE SET
|
||||
user_id = excluded.user_id,
|
||||
keys_p256dh = excluded.keys_p256dh,
|
||||
keys_auth = excluded.keys_auth`
|
||||
).run(userId, endpoint, keysP256dh, keysAuth);
|
||||
},
|
||||
|
||||
/** Returns all subscriptions for a user. */
|
||||
getPushSubscriptions(userId: number): PushSubscriptionLookupRow[] {
|
||||
const db = getConnection();
|
||||
return db
|
||||
.prepare(
|
||||
'SELECT endpoint, keys_p256dh, keys_auth FROM push_subscriptions WHERE user_id = ?'
|
||||
)
|
||||
.all(userId) as PushSubscriptionLookupRow[];
|
||||
},
|
||||
|
||||
/** Deletes one subscription by endpoint. */
|
||||
deletePushSubscription(endpoint: string): void {
|
||||
const db = getConnection();
|
||||
db.prepare('DELETE FROM push_subscriptions WHERE endpoint = ?').run(endpoint);
|
||||
},
|
||||
|
||||
/** Deletes all subscriptions for a user. */
|
||||
deletePushSubscriptionsForUser(userId: number): void {
|
||||
const db = getConnection();
|
||||
db.prepare('DELETE FROM push_subscriptions WHERE user_id = ?').run(userId);
|
||||
},
|
||||
|
||||
// Legacy aliases used by existing services/routes
|
||||
saveSubscription(
|
||||
userId: number,
|
||||
endpoint: string,
|
||||
keysP256dh: string,
|
||||
keysAuth: string
|
||||
): void {
|
||||
pushSubscriptionsDb.createPushSubscription(
|
||||
userId,
|
||||
endpoint,
|
||||
keysP256dh,
|
||||
keysAuth
|
||||
);
|
||||
},
|
||||
getSubscriptions(userId: number): PushSubscriptionLookupRow[] {
|
||||
return pushSubscriptionsDb.getPushSubscriptions(userId);
|
||||
},
|
||||
removeSubscription(endpoint: string): void {
|
||||
pushSubscriptionsDb.deletePushSubscription(endpoint);
|
||||
},
|
||||
removeAllForUser(userId: number): void {
|
||||
pushSubscriptionsDb.deletePushSubscriptionsForUser(userId);
|
||||
},
|
||||
};
|
||||
|
||||
42
server/modules/database/repositories/scan-state.db.ts
Normal file
42
server/modules/database/repositories/scan-state.db.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
|
||||
type ScanStateRow = {
|
||||
last_scanned_at: string;
|
||||
};
|
||||
|
||||
export const scanStateDb = {
|
||||
getLastScannedAt() {
|
||||
const db = getConnection();
|
||||
|
||||
const row = db
|
||||
.prepare(`SELECT last_scanned_at FROM scan_state WHERE id = 1`)
|
||||
.get() as ScanStateRow;
|
||||
|
||||
if (!row) {
|
||||
return null; // Before any scan, the row is undefined.
|
||||
}
|
||||
|
||||
let lastScannedDate: Date | null = null;
|
||||
const lastScannedStr = row.last_scanned_at;
|
||||
|
||||
if (lastScannedStr) {
|
||||
// SQLite CURRENT_TIMESTAMP returns UTC in "YYYY-MM-DD HH:MM:SS" format.
|
||||
// Replace space with 'T' and append 'Z' to parse reliably in JS across all platforms.
|
||||
lastScannedDate = new Date(lastScannedStr.replace(' ', 'T') + 'Z');
|
||||
}
|
||||
|
||||
return lastScannedDate;
|
||||
},
|
||||
|
||||
updateLastScannedAt(scannedAt: Date = new Date()) {
|
||||
const db = getConnection();
|
||||
const sqliteTimestamp = scannedAt.toISOString().slice(0, 19).replace('T', ' ');
|
||||
|
||||
db.prepare(`
|
||||
INSERT INTO scan_state (id, last_scanned_at)
|
||||
VALUES (1, ?)
|
||||
ON CONFLICT (id)
|
||||
DO UPDATE SET last_scanned_at = excluded.last_scanned_at
|
||||
`).run(sqliteTimestamp);
|
||||
}
|
||||
};
|
||||
174
server/modules/database/repositories/sessions.db.ts
Normal file
174
server/modules/database/repositories/sessions.db.ts
Normal file
@@ -0,0 +1,174 @@
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
import { projectsDb } from '@/modules/database/repositories/projects.db.js';
|
||||
import { normalizeProjectPath } from '@/shared/utils.js';
|
||||
|
||||
type SessionRow = {
|
||||
session_id: string;
|
||||
provider: string;
|
||||
project_path: string | null;
|
||||
jsonl_path: string | null;
|
||||
custom_name: string | null;
|
||||
created_at: string;
|
||||
updated_at: string;
|
||||
};
|
||||
|
||||
type SessionMetadataLookupRow = Pick<
|
||||
SessionRow,
|
||||
'session_id' | 'provider' | 'project_path' | 'jsonl_path' | 'custom_name' | 'created_at' | 'updated_at'
|
||||
>;
|
||||
|
||||
function normalizeTimestamp(value?: string): string | null {
|
||||
if (!value) return null;
|
||||
|
||||
const parsed = new Date(value);
|
||||
if (Number.isNaN(parsed.getTime())) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return parsed.toISOString();
|
||||
}
|
||||
|
||||
function normalizeProjectPathForProvider(provider: string, projectPath: string): string {
|
||||
void provider;
|
||||
return normalizeProjectPath(projectPath);
|
||||
}
|
||||
|
||||
export const sessionsDb = {
|
||||
createSession(
|
||||
sessionId: string,
|
||||
provider: string,
|
||||
projectPath: string,
|
||||
customName?: string,
|
||||
createdAt?: string,
|
||||
updatedAt?: string,
|
||||
jsonlPath?: string | null
|
||||
): string {
|
||||
const db = getConnection();
|
||||
const createdAtValue = normalizeTimestamp(createdAt);
|
||||
const updatedAtValue = normalizeTimestamp(updatedAt);
|
||||
const normalizedProjectPath = normalizeProjectPathForProvider(provider, projectPath);
|
||||
|
||||
// First, ensure the project path is recorded in the projects table,
|
||||
// since it's a foreign key in the sessions table.
|
||||
projectsDb.createProjectPath(normalizedProjectPath);
|
||||
|
||||
db.prepare(
|
||||
`INSERT INTO sessions (session_id, provider, custom_name, project_path, jsonl_path, created_at, updated_at)
|
||||
VALUES (?, ?, ?, ?, ?, COALESCE(?, CURRENT_TIMESTAMP), COALESCE(?, CURRENT_TIMESTAMP))
|
||||
ON CONFLICT(session_id) DO UPDATE SET
|
||||
provider = excluded.provider,
|
||||
updated_at = excluded.updated_at,
|
||||
project_path = excluded.project_path,
|
||||
jsonl_path = excluded.jsonl_path,
|
||||
custom_name = COALESCE(excluded.custom_name, sessions.custom_name)`
|
||||
).run(
|
||||
sessionId,
|
||||
provider,
|
||||
customName ?? null,
|
||||
normalizedProjectPath,
|
||||
jsonlPath ?? null,
|
||||
createdAtValue,
|
||||
updatedAtValue
|
||||
);
|
||||
|
||||
return sessionId;
|
||||
},
|
||||
|
||||
updateSessionCustomName(sessionId: string, customName: string): void {
|
||||
const db = getConnection();
|
||||
db.prepare(
|
||||
`UPDATE sessions
|
||||
SET custom_name = ?
|
||||
WHERE session_id = ?`
|
||||
).run(customName, sessionId);
|
||||
},
|
||||
|
||||
getSessionById(sessionId: string): SessionMetadataLookupRow | null {
|
||||
const db = getConnection();
|
||||
const row = db
|
||||
.prepare(
|
||||
`SELECT session_id, provider, project_path, jsonl_path, custom_name, created_at, updated_at
|
||||
FROM sessions
|
||||
WHERE session_id = ?
|
||||
ORDER BY updated_at DESC
|
||||
LIMIT 1`
|
||||
)
|
||||
.get(sessionId) as SessionMetadataLookupRow | undefined;
|
||||
|
||||
return row ?? null;
|
||||
},
|
||||
|
||||
getAllSessions(): SessionRow[] {
|
||||
const db = getConnection();
|
||||
return db
|
||||
.prepare(
|
||||
`SELECT session_id, provider, project_path, jsonl_path, custom_name, created_at, updated_at
|
||||
FROM sessions`
|
||||
)
|
||||
.all() as SessionRow[];
|
||||
},
|
||||
|
||||
getSessionsByProjectPath(projectPath: string): SessionRow[] {
|
||||
const db = getConnection();
|
||||
const normalizedProjectPath = normalizeProjectPath(projectPath);
|
||||
return db
|
||||
.prepare(
|
||||
`SELECT session_id, provider, project_path, jsonl_path, custom_name, created_at, updated_at
|
||||
FROM sessions
|
||||
WHERE project_path = ?`
|
||||
)
|
||||
.all(normalizedProjectPath) as SessionRow[];
|
||||
},
|
||||
|
||||
getSessionsByProjectPathPage(projectPath: string, limit: number, offset: number): SessionRow[] {
|
||||
const db = getConnection();
|
||||
const normalizedProjectPath = normalizeProjectPath(projectPath);
|
||||
return db
|
||||
.prepare(
|
||||
`SELECT session_id, provider, project_path, jsonl_path, custom_name, created_at, updated_at
|
||||
FROM sessions
|
||||
WHERE project_path = ?
|
||||
ORDER BY datetime(COALESCE(updated_at, created_at)) DESC, session_id DESC
|
||||
LIMIT ? OFFSET ?`
|
||||
)
|
||||
.all(normalizedProjectPath, limit, offset) as SessionRow[];
|
||||
},
|
||||
|
||||
countSessionsByProjectPath(projectPath: string): number {
|
||||
const db = getConnection();
|
||||
const normalizedProjectPath = normalizeProjectPath(projectPath);
|
||||
const row = db
|
||||
.prepare(
|
||||
`SELECT COUNT(*) AS count
|
||||
FROM sessions
|
||||
WHERE project_path = ?`
|
||||
)
|
||||
.get(normalizedProjectPath) as { count: number } | undefined;
|
||||
|
||||
return Number(row?.count ?? 0);
|
||||
},
|
||||
|
||||
deleteSessionsByProjectPath(projectPath: string): void {
|
||||
const db = getConnection();
|
||||
const normalizedProjectPath = normalizeProjectPath(projectPath);
|
||||
db.prepare(`DELETE FROM sessions WHERE project_path = ?`).run(normalizedProjectPath);
|
||||
},
|
||||
|
||||
getSessionName(sessionId: string, provider: string): string | null {
|
||||
const db = getConnection();
|
||||
const row = db
|
||||
.prepare(
|
||||
`SELECT custom_name
|
||||
FROM sessions
|
||||
WHERE session_id = ? AND provider = ?`
|
||||
)
|
||||
.get(sessionId, provider) as { custom_name: string | null } | undefined;
|
||||
|
||||
return row?.custom_name ?? null;
|
||||
},
|
||||
|
||||
deleteSessionById(sessionId: string): boolean {
|
||||
const db = getConnection();
|
||||
return db.prepare('DELETE FROM sessions WHERE session_id = ?').run(sessionId).changes > 0;
|
||||
},
|
||||
};
|
||||
140
server/modules/database/repositories/users.ts
Normal file
140
server/modules/database/repositories/users.ts
Normal file
@@ -0,0 +1,140 @@
|
||||
/**
|
||||
* User repository.
|
||||
*
|
||||
* Provides typed CRUD operations for the `users` table.
|
||||
* This is a single-user system, but the schema supports multiple
|
||||
* users for forward compatibility.
|
||||
*/
|
||||
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
|
||||
/**
 * Full row shape of the `users` table.
 * Includes the password hash — never return this shape to API clients.
 */
type UserRow = {
  id: number;
  username: string;
  password_hash: string;
  created_at: string;
  last_login: string | null;
  // SQLite stores booleans as integers: 1 = active, 0 = disabled.
  is_active: number;
  git_name: string | null;
  git_email: string | null;
  // 1 once the user has finished the onboarding flow, otherwise 0.
  has_completed_onboarding: number;
};

// Safe subset of UserRow for returning to clients (no password hash).
type UserPublicRow = Pick<UserRow, 'id' | 'username' | 'created_at' | 'last_login'>;

// Git identity stored per user (see updateGitConfig / getGitConfig).
type UserGitConfig = {
  git_name: string | null;
  git_email: string | null;
};

// INSERT result: the SQLite driver reports lastInsertRowid as number | bigint.
type CreateUserResult = {
  id: number | bigint;
  username: string;
};
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Queries
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const userDb = {
|
||||
/** Returns true if at least one user exists in the database. */
|
||||
hasUsers(): boolean {
|
||||
const db = getConnection();
|
||||
const row = db.prepare('SELECT COUNT(*) as count FROM users').get() as {
|
||||
count: number;
|
||||
};
|
||||
return row.count > 0;
|
||||
},
|
||||
|
||||
/** Inserts a new user and returns the created ID + username. */
|
||||
createUser(username: string, passwordHash: string): CreateUserResult {
|
||||
const db = getConnection();
|
||||
const result = db
|
||||
.prepare('INSERT INTO users (username, password_hash) VALUES (?, ?)')
|
||||
.run(username, passwordHash);
|
||||
return { id: result.lastInsertRowid, username };
|
||||
},
|
||||
|
||||
/**
|
||||
* Looks up an active user by username.
|
||||
* Returns the full row (including password hash) for auth verification.
|
||||
*/
|
||||
getUserByUsername(username: string): UserRow | undefined {
|
||||
const db = getConnection();
|
||||
return db
|
||||
.prepare('SELECT * FROM users WHERE username = ? AND is_active = 1')
|
||||
.get(username) as UserRow | undefined;
|
||||
},
|
||||
|
||||
/** Updates the last_login timestamp. Non-fatal — logs but does not throw. */
|
||||
updateLastLogin(userId: number): void {
|
||||
try {
|
||||
const db = getConnection();
|
||||
db.prepare(
|
||||
'UPDATE users SET last_login = CURRENT_TIMESTAMP WHERE id = ?'
|
||||
).run(userId);
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
console.error('Failed to update last login', { error: message });
|
||||
}
|
||||
},
|
||||
|
||||
/** Returns public user fields by ID (no password hash). */
|
||||
getUserById(userId: number): UserPublicRow | undefined {
|
||||
const db = getConnection();
|
||||
return db
|
||||
.prepare(
|
||||
'SELECT id, username, created_at, last_login FROM users WHERE id = ? AND is_active = 1'
|
||||
)
|
||||
.get(userId) as UserPublicRow | undefined;
|
||||
},
|
||||
|
||||
/** Returns the first active user. Used for single-user mode lookups. */
|
||||
getFirstUser(): UserPublicRow | undefined {
|
||||
const db = getConnection();
|
||||
return db
|
||||
.prepare(
|
||||
'SELECT id, username, created_at, last_login FROM users WHERE is_active = 1 LIMIT 1'
|
||||
)
|
||||
.get() as UserPublicRow | undefined;
|
||||
},
|
||||
|
||||
/** Stores the user's preferred git name and email. */
|
||||
updateGitConfig(
|
||||
userId: number,
|
||||
gitName: string,
|
||||
gitEmail: string
|
||||
): void {
|
||||
const db = getConnection();
|
||||
db.prepare('UPDATE users SET git_name = ?, git_email = ? WHERE id = ?').run(
|
||||
gitName,
|
||||
gitEmail,
|
||||
userId
|
||||
);
|
||||
},
|
||||
|
||||
/** Retrieves the user's git identity (name + email). */
|
||||
getGitConfig(userId: number): UserGitConfig | undefined {
|
||||
const db = getConnection();
|
||||
return db
|
||||
.prepare('SELECT git_name, git_email FROM users WHERE id = ?')
|
||||
.get(userId) as UserGitConfig | undefined;
|
||||
},
|
||||
|
||||
/** Marks onboarding as complete for the given user. */
|
||||
completeOnboarding(userId: number): void {
|
||||
const db = getConnection();
|
||||
db.prepare(
|
||||
'UPDATE users SET has_completed_onboarding = 1 WHERE id = ?'
|
||||
).run(userId);
|
||||
},
|
||||
|
||||
/** Returns true if the user has finished the onboarding flow. */
|
||||
hasCompletedOnboarding(userId: number): boolean {
|
||||
const db = getConnection();
|
||||
const row = db
|
||||
.prepare('SELECT has_completed_onboarding FROM users WHERE id = ?')
|
||||
.get(userId) as { has_completed_onboarding: number } | undefined;
|
||||
return row?.has_completed_onboarding === 1;
|
||||
},
|
||||
};
|
||||
57
server/modules/database/repositories/vapid-keys.ts
Normal file
57
server/modules/database/repositories/vapid-keys.ts
Normal file
@@ -0,0 +1,57 @@
|
||||
/**
|
||||
* VAPID keys repository.
|
||||
*
|
||||
* Stores and retrieves the Web Push VAPID key pair.
|
||||
*/
|
||||
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
|
||||
// Raw column shape of the vapid_keys table.
type VapidKeyRow = {
  public_key: string;
  private_key: string;
};

// Camel-cased pair handed to the web-push layer.
type VapidKeyPair = {
  publicKey: string;
  privateKey: string;
};
|
||||
|
||||
export const vapidKeysDb = {
|
||||
/** Returns the latest stored VAPID key pair, or null when unset. */
|
||||
getVapidKeys(): VapidKeyPair | null {
|
||||
const db = getConnection();
|
||||
const row = db
|
||||
.prepare(
|
||||
'SELECT public_key, private_key FROM vapid_keys ORDER BY id DESC LIMIT 1'
|
||||
)
|
||||
.get() as Pick<VapidKeyRow, 'public_key' | 'private_key'> | undefined;
|
||||
|
||||
if (!row) return null;
|
||||
return {
|
||||
publicKey: row.public_key,
|
||||
privateKey: row.private_key,
|
||||
};
|
||||
},
|
||||
|
||||
/** Persists a new VAPID key pair. */
|
||||
createVapidKeys(publicKey: string, privateKey: string): void {
|
||||
const db = getConnection();
|
||||
db.prepare(
|
||||
'INSERT INTO vapid_keys (public_key, private_key) VALUES (?, ?)'
|
||||
).run(publicKey, privateKey);
|
||||
},
|
||||
|
||||
/** Replaces all existing keys with a fresh pair. */
|
||||
updateVapidKeys(publicKey: string, privateKey: string): void {
|
||||
const db = getConnection();
|
||||
db.prepare('DELETE FROM vapid_keys').run();
|
||||
vapidKeysDb.createVapidKeys(publicKey, privateKey);
|
||||
},
|
||||
|
||||
/** Deletes all VAPID key rows. */
|
||||
deleteVapidKeys(): void {
|
||||
const db = getConnection();
|
||||
db.prepare('DELETE FROM vapid_keys').run();
|
||||
},
|
||||
};
|
||||
|
||||
152
server/modules/database/schema.ts
Normal file
152
server/modules/database/schema.ts
Normal file
@@ -0,0 +1,152 @@
|
||||
// ---------------------------------------------------------------------------
// SQLite schema DDL. Each constant is a single CREATE TABLE IF NOT EXISTS
// statement; INIT_SCHEMA_SQL (bottom of file) composes them, plus the indexes
// that are safe on a fresh database, into the initialization script.
// ---------------------------------------------------------------------------

// User accounts. Not exported — referenced only via INIT_SCHEMA_SQL.
const USER_TABLE_SCHEMA_SQL = `
CREATE TABLE IF NOT EXISTS users (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    username TEXT UNIQUE NOT NULL,
    password_hash TEXT NOT NULL,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
    last_login DATETIME,
    is_active BOOLEAN DEFAULT 1,
    git_name TEXT,
    git_email TEXT,
    has_completed_onboarding BOOLEAN DEFAULT 0
);
`;

// Per-user API keys for programmatic access; cascade-deleted with the user.
export const API_KEYS_TABLE_SCHEMA_SQL = `
CREATE TABLE IF NOT EXISTS api_keys (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    user_id INTEGER NOT NULL,
    key_name TEXT NOT NULL,
    api_key TEXT UNIQUE NOT NULL,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
    last_used DATETIME,
    is_active BOOLEAN DEFAULT 1,
    FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
);
`;

// Stored third-party credentials (git hosting tokens etc.) per user.
export const USER_CREDENTIALS_TABLE_SCHEMA_SQL = `
CREATE TABLE IF NOT EXISTS user_credentials (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    user_id INTEGER NOT NULL,
    credential_name TEXT NOT NULL,
    credential_type TEXT NOT NULL, -- 'github_token', 'gitlab_token', 'bitbucket_token', etc.
    credential_value TEXT NOT NULL,
    description TEXT,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
    is_active BOOLEAN DEFAULT 1,
    FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
);
`;

// One row per user: notification preferences stored as a JSON blob.
export const USER_NOTIFICATION_PREFERENCES_TABLE_SCHEMA_SQL = `
CREATE TABLE IF NOT EXISTS user_notification_preferences (
    user_id INTEGER PRIMARY KEY,
    preferences_json TEXT NOT NULL,
    updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
    FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
);
`;

// Web Push VAPID key pairs; the repository reads the latest row (highest id).
export const VAPID_KEYS_TABLE_SCHEMA_SQL = `
CREATE TABLE IF NOT EXISTS vapid_keys (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    public_key TEXT NOT NULL,
    private_key TEXT NOT NULL,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP
);
`;

// Browser push subscriptions, unique per endpoint URL.
export const PUSH_SUBSCRIPTIONS_TABLE_SCHEMA_SQL = `
CREATE TABLE IF NOT EXISTS push_subscriptions (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    user_id INTEGER NOT NULL,
    endpoint TEXT NOT NULL UNIQUE,
    keys_p256dh TEXT NOT NULL,
    keys_auth TEXT NOT NULL,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
    FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
);
`;

// Projects known to the UI; project_path is the FK target for sessions.
export const PROJECTS_TABLE_SCHEMA_SQL = `
CREATE TABLE IF NOT EXISTS projects (
    project_id TEXT PRIMARY KEY NOT NULL,
    project_path TEXT NOT NULL UNIQUE,
    custom_project_name TEXT DEFAULT NULL,
    isStarred BOOLEAN DEFAULT 0,
    isArchived BOOLEAN DEFAULT 0
);
`;

// CLI sessions; project_path is set NULL (not cascaded) when its project row
// disappears, and follows renames via ON UPDATE CASCADE.
export const SESSIONS_TABLE_SCHEMA_SQL = `
CREATE TABLE IF NOT EXISTS sessions (
    session_id TEXT NOT NULL,
    provider TEXT NOT NULL DEFAULT 'claude',
    custom_name TEXT,
    project_path TEXT,
    jsonl_path TEXT,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
    updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
    PRIMARY KEY (session_id),
    FOREIGN KEY (project_path) REFERENCES projects(project_path)
        ON DELETE SET NULL
        ON UPDATE CASCADE
);
`;

// Single-row table (id constrained to 1) tracking the last filesystem scan.
export const LAST_SCANNED_AT_SQL = `
CREATE TABLE IF NOT EXISTS scan_state (
    id INTEGER PRIMARY KEY CHECK (id = 1),
    last_scanned_at TIMESTAMP NULL
);
`;

// Generic key/value store for app-level configuration.
export const APP_CONFIG_TABLE_SCHEMA_SQL = `
CREATE TABLE IF NOT EXISTS app_config (
    key TEXT PRIMARY KEY,
    value TEXT NOT NULL,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP
);
`;

// Full initialization script. Some indexes are intentionally deferred to
// migrations — see the inline NOTEs below.
export const INIT_SCHEMA_SQL = `
-- Initialize authentication database
PRAGMA foreign_keys = ON;

${USER_TABLE_SCHEMA_SQL}
-- Indexes for performance for user lookups
CREATE INDEX IF NOT EXISTS idx_users_username ON users(username);
CREATE INDEX IF NOT EXISTS idx_users_active ON users(is_active);

${API_KEYS_TABLE_SCHEMA_SQL}
CREATE INDEX IF NOT EXISTS idx_api_keys_key ON api_keys(api_key);
CREATE INDEX IF NOT EXISTS idx_api_keys_user_id ON api_keys(user_id);
CREATE INDEX IF NOT EXISTS idx_api_keys_active ON api_keys(is_active);

${USER_CREDENTIALS_TABLE_SCHEMA_SQL}
CREATE INDEX IF NOT EXISTS idx_user_credentials_user_id ON user_credentials(user_id);
CREATE INDEX IF NOT EXISTS idx_user_credentials_type ON user_credentials(credential_type);
CREATE INDEX IF NOT EXISTS idx_user_credentials_active ON user_credentials(is_active);

${USER_NOTIFICATION_PREFERENCES_TABLE_SCHEMA_SQL}
CREATE INDEX IF NOT EXISTS idx_user_notification_preferences_user_id ON user_notification_preferences(user_id);

${VAPID_KEYS_TABLE_SCHEMA_SQL}

${PUSH_SUBSCRIPTIONS_TABLE_SCHEMA_SQL}
CREATE INDEX IF NOT EXISTS idx_push_subscriptions_user_id ON push_subscriptions(user_id);

${PROJECTS_TABLE_SCHEMA_SQL}
-- NOTE: These indexes are created in migrations after legacy table-shape repairs.
-- Creating them here can fail on upgraded installs where projects lacks those columns.

${SESSIONS_TABLE_SCHEMA_SQL}
CREATE INDEX IF NOT EXISTS idx_session_ids_lookup ON sessions(session_id);
-- NOTE: This index is created in migrations after sessions is rebuilt to include project_path.
-- Creating it here can fail on upgraded installs where the legacy sessions table has no project_path.

${LAST_SCANNED_AT_SQL}

${APP_CONFIG_TABLE_SCHEMA_SQL}
`;
|
||||
6
server/modules/projects/index.ts
Normal file
6
server/modules/projects/index.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
// Public surface of the projects module: listing/naming helpers plus
// delete/archive operations. Consumers should import from here rather than
// reaching into the individual service files.
export {
  generateDisplayName,
  getProjectsWithSessions,
} from './services/projects-with-sessions-fetch.service.js';
export { updateProjectDisplayName } from './services/project-management.service.js';
export { deleteOrArchiveProject, deleteSessionJsonlFilesForProjectPath } from './services/project-delete.service.js';
|
||||
247
server/modules/projects/projects.routes.ts
Normal file
247
server/modules/projects/projects.routes.ts
Normal file
@@ -0,0 +1,247 @@
|
||||
import express from 'express';
|
||||
|
||||
import { createProject, updateProjectDisplayName } from '@/modules/projects/services/project-management.service.js';
|
||||
import { startCloneProject } from '@/modules/projects/services/project-clone.service.js';
|
||||
import { getProjectTaskMaster } from '@/modules/projects/services/projects-has-taskmaster.service.js';
|
||||
import { AppError, asyncHandler } from '@/shared/utils.js';
|
||||
import { getProjectSessionsPage, getProjectsWithSessions } from '@/modules/projects/services/projects-with-sessions-fetch.service.js';
|
||||
import { deleteOrArchiveProject } from '@/modules/projects/services/project-delete.service.js';
|
||||
import { applyLegacyStarredProjectIds, toggleProjectStar } from '@/modules/projects/services/project-star.service.js';
|
||||
|
||||
const router = express.Router();

// Minimal view of the user object the auth layer attaches to the request;
// id may be absent on unauthenticated requests.
type AuthenticatedUser = {
  id?: number | string;
};
|
||||
|
||||
function readQueryStringValue(value: unknown): string {
|
||||
if (typeof value === 'string') {
|
||||
return value;
|
||||
}
|
||||
|
||||
if (Array.isArray(value) && typeof value[0] === 'string') {
|
||||
return value[0];
|
||||
}
|
||||
|
||||
return '';
|
||||
}
|
||||
|
||||
function readOptionalNumericQueryValue(value: unknown): number | null {
|
||||
const rawValue = readQueryStringValue(value).trim();
|
||||
if (!rawValue) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const parsedValue = Number.parseInt(rawValue, 10);
|
||||
return Number.isNaN(parsedValue) ? null : parsedValue;
|
||||
}
|
||||
|
||||
function parseNonNegativeIntQuery(value: unknown, name: string, fallback: number): number {
|
||||
const rawValue = readQueryStringValue(value).trim();
|
||||
if (!rawValue) {
|
||||
return fallback;
|
||||
}
|
||||
|
||||
const parsedValue = Number.parseInt(rawValue, 10);
|
||||
if (Number.isNaN(parsedValue) || parsedValue < 0) {
|
||||
throw new AppError(`${name} must be a non-negative integer`, {
|
||||
code: 'INVALID_QUERY_PARAMETER',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
return parsedValue;
|
||||
}
|
||||
|
||||
function resolveRouteErrorMessage(error: unknown): string {
|
||||
if (error instanceof AppError) {
|
||||
return error.message;
|
||||
}
|
||||
|
||||
if (error instanceof Error && error.message) {
|
||||
return error.message;
|
||||
}
|
||||
|
||||
return 'Failed to clone repository';
|
||||
}
|
||||
|
||||
router.get(
|
||||
'/',
|
||||
asyncHandler(async (_req, res) => {
|
||||
const projects = await getProjectsWithSessions();
|
||||
res.json(projects);
|
||||
}),
|
||||
);
|
||||
|
||||
router.get(
|
||||
'/:projectId/sessions',
|
||||
asyncHandler(async (req, res) => {
|
||||
const projectId = typeof req.params.projectId === 'string' ? req.params.projectId : '';
|
||||
const limit = parseNonNegativeIntQuery(req.query.limit, 'limit', 20);
|
||||
const offset = parseNonNegativeIntQuery(req.query.offset, 'offset', 0);
|
||||
const sessionsPage = await getProjectSessionsPage(projectId, { limit, offset });
|
||||
res.json(sessionsPage);
|
||||
}),
|
||||
);
|
||||
|
||||
// POST /create-project — register a local directory as a project.
// Rejects legacy request shapes (workspaceType, inline clone params) with 400
// so old clients get an actionable error instead of silent misbehavior.
router.post(
  '/create-project',
  asyncHandler(async (req, res) => {
    const requestBody = req.body as Record<string, unknown>;
    const projectPath = typeof requestBody.path === 'string' ? requestBody.path : '';
    const customName = typeof requestBody.customName === 'string' ? requestBody.customName : null;

    // Legacy field from an older API shape — explicitly unsupported.
    if (requestBody.workspaceType !== undefined) {
      throw new AppError('workspaceType is no longer supported. Use the single create-project flow.', {
        code: 'LEGACY_WORKSPACE_TYPE_UNSUPPORTED',
        statusCode: 400,
      });
    }

    // Cloning moved to the SSE endpoint; reject clone params here.
    if (requestBody.githubUrl || requestBody.githubTokenId || requestBody.newGithubToken) {
      throw new AppError('Repository cloning is not supported on create-project', {
        code: 'CLONE_NOT_SUPPORTED_ON_CREATE_PROJECT',
        statusCode: 400,
        details: 'Use /api/projects/clone-progress for cloning workflows',
      });
    }

    const projectCreationResult = await createProject({
      projectPath,
      customName,
    });

    // `outcome` distinguishes a brand-new project from an archived one whose
    // path was reused and reactivated.
    res.json({
      success: true,
      project: projectCreationResult.project,
      message:
        projectCreationResult.outcome === 'reactivated_archived'
          ? 'Archived project path reused successfully'
          : 'Project created successfully',
    });
  }),
);
|
||||
|
||||
/**
|
||||
* One-time (or idempotent) migration: apply legacy `localStorage` starred projectIds to the DB, then clear client storage.
|
||||
*/
|
||||
router.post(
|
||||
'/migrate-legacy-stars',
|
||||
asyncHandler(async (req, res) => {
|
||||
const projectIds = Array.isArray((req.body as { projectIds?: unknown })?.projectIds)
|
||||
? ((req.body as { projectIds: unknown[] }).projectIds as unknown[]).map((x) => String(x))
|
||||
: [];
|
||||
const { updated } = applyLegacyStarredProjectIds(projectIds);
|
||||
res.json({ success: true, updated });
|
||||
}),
|
||||
);
|
||||
|
||||
// GET /clone-progress — Server-Sent Events stream that clones a repository.
// Clone parameters arrive as query params (EventSource cannot send a body).
router.get('/clone-progress', async (req, res) => {
  // Standard SSE headers; flush immediately so the client sees the stream
  // open before the first event arrives.
  res.setHeader('Content-Type', 'text/event-stream');
  res.setHeader('Cache-Control', 'no-cache');
  res.setHeader('Connection', 'keep-alive');
  res.flushHeaders();

  // Emits one SSE event; silently drops events once the stream has ended.
  const sendEvent = (type: string, data: Record<string, unknown>) => {
    if (res.writableEnded) {
      return;
    }

    res.write(`data: ${JSON.stringify({ type, ...data })}\n\n`);
  };

  // If the client disconnects mid-clone, cancel the in-flight git process.
  let cloneOperation: Awaited<ReturnType<typeof startCloneProject>> | null = null;
  const closeListener = () => {
    cloneOperation?.cancel();
  };
  req.on('close', closeListener);

  try {
    const queryParams = req.query as Record<string, unknown>;
    const workspacePath = readQueryStringValue(queryParams.path);
    const githubUrl = readQueryStringValue(queryParams.githubUrl);
    const githubTokenId = readOptionalNumericQueryValue(queryParams.githubTokenId);
    const newGithubToken = readQueryStringValue(queryParams.newGithubToken) || null;

    // Auth middleware attaches `user`; reject when it is missing.
    const authenticatedUser = (req as typeof req & { user?: AuthenticatedUser }).user;
    const userId = authenticatedUser?.id;
    if (userId === undefined || userId === null) {
      throw new AppError('Authenticated user is required', {
        code: 'AUTHENTICATION_REQUIRED',
        statusCode: 401,
      });
    }

    cloneOperation = await startCloneProject(
      {
        workspacePath,
        githubUrl,
        githubTokenId,
        newGithubToken,
        userId,
      },
      {
        onProgress: (message) => {
          sendEvent('progress', { message });
        },
        onComplete: ({ project, message }) => {
          sendEvent('complete', { project, message });
        },
      },
    );

    await cloneOperation.waitForCompletion;
  } catch (error) {
    // HTTP status is already 200 at this point, so failures are reported
    // in-stream as an 'error' event.
    sendEvent('error', { message: resolveRouteErrorMessage(error) });
  } finally {
    req.off('close', closeListener);
    if (!res.writableEnded) {
      res.end();
    }
  }
});
|
||||
|
||||
router.get(
|
||||
'/:projectId/taskmaster',
|
||||
asyncHandler(async (req, res) => {
|
||||
const projectId = typeof req.params.projectId === 'string' ? req.params.projectId : '';
|
||||
const taskMasterDetails = await getProjectTaskMaster(projectId);
|
||||
res.json(taskMasterDetails);
|
||||
}),
|
||||
);
|
||||
|
||||
router.put('/:projectId/rename', (req, res) => {
|
||||
try {
|
||||
const projectId = typeof req.params.projectId === 'string' ? req.params.projectId : '';
|
||||
const { displayName } = req.body as { displayName?: unknown };
|
||||
updateProjectDisplayName(projectId, displayName);
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
res.status(500).json({ error: error instanceof Error ? error.message : 'Failed to rename project' });
|
||||
}
|
||||
});
|
||||
|
||||
router.post(
|
||||
'/:projectId/toggle-star',
|
||||
asyncHandler(async (req, res) => {
|
||||
const projectId = typeof req.params.projectId === 'string' ? req.params.projectId : '';
|
||||
const { isStarred } = toggleProjectStar(projectId);
|
||||
res.json({ success: true, isStarred });
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* - `force` not set / false: archive project in DB only (`isArchived` = 1; hidden from active list).
|
||||
* - `force=true`: remove DB row, delete session rows for that path, remove all `*.jsonl` under the Claude project dir.
|
||||
*/
|
||||
router.delete(
|
||||
'/:projectId',
|
||||
asyncHandler(async (req, res) => {
|
||||
const projectId = typeof req.params.projectId === 'string' ? req.params.projectId : '';
|
||||
const force = req.query.force === 'true';
|
||||
await deleteOrArchiveProject(projectId, force);
|
||||
res.json({ success: true });
|
||||
}),
|
||||
);
|
||||
|
||||
export default router;
|
||||
321
server/modules/projects/services/project-clone.service.ts
Normal file
321
server/modules/projects/services/project-clone.service.ts
Normal file
@@ -0,0 +1,321 @@
|
||||
import { spawn } from 'node:child_process';
|
||||
import { access, mkdir, rm } from 'node:fs/promises';
|
||||
import path from 'node:path';
|
||||
|
||||
import { githubTokensDb } from '@/modules/database/index.js';
|
||||
import { createProject } from '@/modules/projects/services/project-management.service.js';
|
||||
import type { WorkspacePathValidationResult } from '@/shared/types.js';
|
||||
import { AppError, validateWorkspacePath } from '@/shared/utils.js';
|
||||
|
||||
// Input for one clone operation (validated inside startCloneProject).
type CloneProjectInput = {
  workspacePath: string;
  githubUrl: string;
  // Reference to a stored token; takes precedence over newGithubToken.
  githubTokenId?: number | null;
  // Ad-hoc token supplied directly with the request.
  newGithubToken?: string | null;
  userId: number | string;
};

// Payload emitted once the clone finished and the project was registered.
type CloneCompletePayload = {
  project: Record<string, unknown>;
  message: string;
};

// Callbacks used to stream clone progress back to the caller (SSE route).
type CloneProjectEventHandlers = {
  onProgress: (message: string) => void;
  onComplete: (payload: CloneCompletePayload) => void;
};

// Minimal surface of the spawned git child process used by this module.
type GitCloneProcess = {
  stdout: NodeJS.ReadableStream | null;
  stderr: NodeJS.ReadableStream | null;
  on(event: 'close', listener: (code: number | null) => void): void;
  on(event: 'error', listener: (error: NodeJS.ErrnoException) => void): void;
  kill(): void;
};

// Injectable side effects — overridable in tests (see defaultDependencies).
type CloneProjectDependencies = {
  validatePath: (requestedPath: string) => Promise<WorkspacePathValidationResult>;
  ensureDirectory: (directoryPath: string) => Promise<void>;
  pathExists: (targetPath: string) => Promise<boolean>;
  removePath: (targetPath: string) => Promise<void>;
  getGithubTokenById: (
    tokenId: number,
    userId: number,
  ) => Promise<{ github_token: string } | null>;
  spawnGitClone: (cloneUrl: string, clonePath: string) => GitCloneProcess;
  registerProject: (projectPath: string, customName: string) => Promise<{ project: Record<string, unknown> }>;
  logError: (message: string, error: unknown) => void;
};

// Handle returned to the caller: completion promise + cancellation hook.
export type CloneProjectOperation = {
  waitForCompletion: Promise<void>;
  cancel: () => void;
};
|
||||
|
||||
async function defaultPathExists(targetPath: string): Promise<boolean> {
|
||||
try {
|
||||
await access(targetPath);
|
||||
return true;
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
|
||||
return false;
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
function sanitizeGitError(message: string, token: string | null): string {
|
||||
if (!message || !token) {
|
||||
return message;
|
||||
}
|
||||
|
||||
const escapedToken = token.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
||||
return message.replace(new RegExp(escapedToken, 'g'), '***');
|
||||
}
|
||||
|
||||
function resolveCloneFailureMessage(lastError: string, sanitizedError: string): string {
|
||||
if (lastError.includes('Authentication failed') || lastError.includes('could not read Username')) {
|
||||
return 'Authentication failed. Please check your credentials.';
|
||||
}
|
||||
|
||||
if (lastError.includes('Repository not found')) {
|
||||
return 'Repository not found. Please check the URL and ensure you have access.';
|
||||
}
|
||||
|
||||
if (lastError.includes('already exists')) {
|
||||
return 'Directory already exists';
|
||||
}
|
||||
|
||||
if (sanitizedError) {
|
||||
return sanitizedError;
|
||||
}
|
||||
|
||||
return 'Git clone failed';
|
||||
}
|
||||
|
||||
function resolveErrorMessage(error: unknown): string {
|
||||
if (error instanceof AppError) {
|
||||
return error.message;
|
||||
}
|
||||
|
||||
if (error instanceof Error && error.message) {
|
||||
return error.message;
|
||||
}
|
||||
|
||||
return 'Unexpected error';
|
||||
}
|
||||
|
||||
// Production wiring for startCloneProject: real fs, real git, real DB.
// Tests can supply an alternative CloneProjectDependencies object instead.
const defaultDependencies: CloneProjectDependencies = {
  validatePath: validateWorkspacePath,
  ensureDirectory: async (directoryPath: string): Promise<void> => {
    await mkdir(directoryPath, { recursive: true });
  },
  pathExists: defaultPathExists,
  removePath: async (targetPath: string): Promise<void> => {
    // force: true — removing an already-missing path is not an error.
    await rm(targetPath, { recursive: true, force: true });
  },
  getGithubTokenById: async (
    tokenId: number,
    userId: number,
  ): Promise<{ github_token: string } | null> => {
    const tokenRow = githubTokensDb.getGithubTokenById(userId, tokenId) as
      | { github_token: string }
      | null;
    return tokenRow;
  },
  // '--' stops git option parsing so the URL/path cannot be read as flags;
  // GIT_TERMINAL_PROMPT=0 makes git fail fast instead of prompting for input.
  spawnGitClone: (cloneUrl: string, clonePath: string): GitCloneProcess =>
    spawn('git', ['clone', '--progress', '--', cloneUrl, clonePath], {
      stdio: ['ignore', 'pipe', 'pipe'],
      env: {
        ...process.env,
        GIT_TERMINAL_PROMPT: '0',
      },
    }) as unknown as GitCloneProcess,
  registerProject: async (
    projectPath: string,
    customName: string,
  ): Promise<{ project: Record<string, unknown> }> =>
    createProject({
      projectPath,
      customName,
    }) as Promise<{ project: Record<string, unknown> }>,
  logError: (message: string, error: unknown): void => {
    console.error(message, error);
  },
};
|
||||
|
||||
/**
 * Starts a `git clone` of `input.githubUrl` into a subdirectory of
 * `input.workspacePath`, streaming progress through `handlers` and
 * registering the resulting directory as a project on success.
 *
 * Validation failures throw AppError before the clone starts; failures after
 * that reject `waitForCompletion` instead. `cancel()` kills the git process.
 *
 * @returns a handle with the `waitForCompletion` promise and `cancel` hook.
 */
export async function startCloneProject(
  input: CloneProjectInput,
  handlers: CloneProjectEventHandlers,
  dependencies: CloneProjectDependencies = defaultDependencies,
): Promise<CloneProjectOperation> {
  const normalizedWorkspacePath = input.workspacePath.trim();
  const normalizedGithubUrl = input.githubUrl.trim();

  if (!normalizedWorkspacePath) {
    throw new AppError('workspacePath and githubUrl are required', {
      code: 'WORKSPACE_PATH_REQUIRED',
      statusCode: 400,
    });
  }

  if (!normalizedGithubUrl) {
    throw new AppError('workspacePath and githubUrl are required', {
      code: 'GITHUB_URL_REQUIRED',
      statusCode: 400,
    });
  }

  // Reject URLs that look like CLI flags (argument-injection guard);
  // the spawn call's '--' separator is a second layer of defense.
  if (normalizedGithubUrl.startsWith('-')) {
    throw new AppError('Invalid githubUrl', {
      code: 'INVALID_GITHUB_URL',
      statusCode: 400,
    });
  }

  const pathValidation = await dependencies.validatePath(normalizedWorkspacePath);
  if (!pathValidation.valid || !pathValidation.resolvedPath) {
    throw new AppError(pathValidation.error || 'Invalid workspace path', {
      code: 'INVALID_PROJECT_PATH',
      statusCode: 400,
    });
  }

  const absolutePath = pathValidation.resolvedPath;
  await dependencies.ensureDirectory(absolutePath);

  // Resolve an auth token: a stored token referenced by ID takes precedence
  // over an ad-hoc token passed directly with the request.
  let githubToken: string | null = null;
  if (typeof input.githubTokenId === 'number') {
    const numericUserId =
      typeof input.userId === 'number' ? input.userId : Number.parseInt(String(input.userId), 10);
    if (Number.isNaN(numericUserId)) {
      throw new AppError('Authenticated user is required', {
        code: 'AUTHENTICATION_REQUIRED',
        statusCode: 401,
      });
    }

    const token = await dependencies.getGithubTokenById(input.githubTokenId, numericUserId);
    if (!token) {
      throw new AppError('GitHub token not found', {
        code: 'GITHUB_TOKEN_NOT_FOUND',
        statusCode: 404,
      });
    }

    githubToken = token.github_token;
  } else if (input.newGithubToken && input.newGithubToken.trim().length > 0) {
    githubToken = input.newGithubToken.trim();
  }

  // Derive the target directory name from the final URL segment, after
  // stripping trailing slashes and a '.git' suffix.
  const sanitizedGithubUrl = normalizedGithubUrl.replace(/\/+$/, '').replace(/\.git$/, '');
  const repoName = sanitizedGithubUrl.split('/').pop() || 'repository';
  const clonePath = path.join(absolutePath, repoName);

  if (await dependencies.pathExists(clonePath)) {
    throw new AppError(
      `Directory "${repoName}" already exists. Please choose a different location or remove the existing directory.`,
      {
        code: 'CLONE_TARGET_ALREADY_EXISTS',
        statusCode: 409,
      },
    );
  }

  // For HTTPS URLs, embed the token as the URL username; SSH URLs are not
  // URL-parseable and are used unchanged (see catch below).
  let cloneUrl = normalizedGithubUrl;
  if (githubToken) {
    try {
      const url = new URL(normalizedGithubUrl);
      url.username = githubToken;
      url.password = '';
      cloneUrl = url.toString();
    } catch {
      // SSH URLs cannot be represented by URL constructor and are used as-is.
    }
  }

  handlers.onProgress(`Cloning into '${repoName}'...`);
  const gitProcess = dependencies.spawnGitClone(cloneUrl, clonePath);
  // Last chunk seen on stderr, used to classify the failure on exit.
  // NOTE(review): a chunk that trims to '' still overwrites lastError, which
  // can discard earlier error text — confirm whether the assignment should be
  // guarded by `if (message)`.
  let lastError = '';

  gitProcess.stdout?.on('data', (data: Buffer | string) => {
    const message = data.toString().trim();
    if (message) {
      handlers.onProgress(message);
    }
  });

  // git writes its progress to stderr, so stderr chunks are both forwarded
  // as progress and remembered for failure classification.
  gitProcess.stderr?.on('data', (data: Buffer | string) => {
    const message = data.toString().trim();
    lastError = message;
    if (message) {
      handlers.onProgress(message);
    }
  });

  const waitForCompletion = new Promise<void>((resolve, reject) => {
    gitProcess.on('close', async (code) => {
      if (code === 0) {
        // Clone succeeded: register the checkout as a project.
        try {
          const createdProject = await dependencies.registerProject(clonePath, repoName);
          handlers.onComplete({
            project: createdProject.project,
            message: 'Repository cloned successfully',
          });
          resolve();
        } catch (error) {
          reject(
            new AppError(`Clone succeeded but failed to add project: ${resolveErrorMessage(error)}`, {
              code: 'CLONE_PROJECT_REGISTRATION_FAILED',
              statusCode: 500,
            }),
          );
        }
        return;
      }

      // Clone failed: build a credential-safe message and remove the
      // partially-written checkout directory.
      const sanitizedError = sanitizeGitError(lastError, githubToken);
      const errorMessage = resolveCloneFailureMessage(lastError, sanitizedError);

      try {
        await dependencies.removePath(clonePath);
      } catch (cleanupError) {
        dependencies.logError('Failed to clean up after clone failure:', cleanupError);
      }

      reject(
        new AppError(errorMessage, {
          code: 'GIT_CLONE_FAILED',
          statusCode: 500,
        }),
      );
    });

    gitProcess.on('error', (error) => {
      // Spawn-level failure (no exit code): git missing or not executable.
      if (error.code === 'ENOENT') {
        reject(
          new AppError('Git is not installed or not in PATH', {
            code: 'GIT_NOT_FOUND',
            statusCode: 500,
          }),
        );
        return;
      }

      reject(
        new AppError(error.message, {
          code: 'GIT_EXECUTION_FAILED',
          statusCode: 500,
        }),
      );
    });
  });

  return {
    waitForCompletion,
    cancel: () => {
      gitProcess.kill();
    },
  };
}
|
||||
75
server/modules/projects/services/project-delete.service.ts
Normal file
75
server/modules/projects/services/project-delete.service.ts
Normal file
@@ -0,0 +1,75 @@
|
||||
import { promises as fs } from 'node:fs';
|
||||
import path from 'node:path';
|
||||
|
||||
import { projectsDb, sessionsDb } from '@/modules/database/index.js';
|
||||
import { AppError } from '@/shared/utils.js';
|
||||
|
||||
function uniqueJsonlPathsFromSessions(
|
||||
sessions: Array<{ jsonl_path: string | null }>,
|
||||
): string[] {
|
||||
const seen = new Set<string>();
|
||||
const result: string[] = [];
|
||||
|
||||
for (const row of sessions) {
|
||||
const raw = row.jsonl_path?.trim();
|
||||
if (!raw) {
|
||||
continue;
|
||||
}
|
||||
const absolute = path.isAbsolute(raw) ? path.normalize(raw) : path.resolve(raw);
|
||||
if (seen.has(absolute)) {
|
||||
continue;
|
||||
}
|
||||
seen.add(absolute);
|
||||
result.push(absolute);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
async function unlinkJsonlIfExists(filePath: string): Promise<void> {
|
||||
try {
|
||||
await fs.unlink(filePath);
|
||||
} catch (error) {
|
||||
const code = (error as NodeJS.ErrnoException).code;
|
||||
if (code === 'ENOENT') {
|
||||
return;
|
||||
}
|
||||
console.warn(`[project-delete] Failed to remove ${filePath}:`, (error as Error).message);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Loads all session rows for the project path and removes each distinct `jsonl_path` file on disk.
|
||||
*/
|
||||
export async function deleteSessionJsonlFilesForProjectPath(projectPath: string): Promise<void> {
|
||||
const sessions = sessionsDb.getSessionsByProjectPath(projectPath);
|
||||
const paths = uniqueJsonlPathsFromSessions(sessions);
|
||||
|
||||
for (const filePath of paths) {
|
||||
await unlinkJsonlIfExists(filePath);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* - **Soft delete** (`force` false): set `isArchived` on the `projects` row (hide from the active list; DB only).
|
||||
* - **Force** (`force` true): for each session row for that `project_path`, delete the file at `jsonl_path`
|
||||
* (when set), then remove session rows and the `projects` row.
|
||||
*/
|
||||
export async function deleteOrArchiveProject(projectId: string, force: boolean): Promise<void> {
|
||||
const row = projectsDb.getProjectById(projectId);
|
||||
if (!row) {
|
||||
throw new AppError(`Unknown projectId: ${projectId}`, {
|
||||
code: 'PROJECT_NOT_FOUND',
|
||||
statusCode: 404,
|
||||
});
|
||||
}
|
||||
|
||||
if (!force) {
|
||||
projectsDb.updateProjectIsArchivedById(projectId, true);
|
||||
return;
|
||||
}
|
||||
|
||||
await deleteSessionJsonlFilesForProjectPath(row.project_path);
|
||||
sessionsDb.deleteSessionsByProjectPath(row.project_path);
|
||||
projectsDb.deleteProjectById(projectId);
|
||||
}
|
||||
150
server/modules/projects/services/project-management.service.ts
Normal file
150
server/modules/projects/services/project-management.service.ts
Normal file
@@ -0,0 +1,150 @@
|
||||
import fs from 'node:fs/promises';
|
||||
import path from 'node:path';
|
||||
|
||||
import { projectsDb } from '@/modules/database/index.js';
|
||||
import type {
|
||||
CreateProjectPathResult,
|
||||
ProjectRepositoryRow,
|
||||
WorkspacePathValidationResult,
|
||||
} from '@/shared/types.js';
|
||||
import { AppError, normalizeProjectPath, validateWorkspacePath } from '@/shared/utils.js';
|
||||
|
||||
/** Raw input accepted by createProject. */
type CreateProjectInput = {
  projectPath: string;
  customName?: string | null;
};

/** Injectable collaborators so createProject can be unit-tested without FS/DB access. */
type CreateProjectDependencies = {
  // Confirms the path lies inside the allowed workspace root.
  validatePath: (projectPath: string) => Promise<WorkspacePathValidationResult>;
  // Creates the directory (recursively) and verifies the result is a directory.
  ensureWorkspaceDirectory: (projectPath: string) => Promise<void>;
  // Inserts (or reuses) the projects row; conflicts are reported via `outcome`.
  persistProjectPath: (projectPath: string, customName: string | null) => CreateProjectPathResult;
  // Fallback lookup used when persistProjectPath does not return the row itself.
  getProjectByPath: (projectPath: string) => ProjectRepositoryRow | null;
};

/** Shape returned to API callers; session lists are always empty for a freshly created project. */
type ProjectApiView = {
  projectId: string;
  path: string;
  fullPath: string;
  displayName: string;
  customName: string | null;
  isArchived: boolean;
  isStarred: boolean;
  sessions: [];
  cursorSessions: [];
  codexSessions: [];
  geminiSessions: [];
  sessionMeta: {
    hasMore: false;
    total: 0;
  };
};

type CreateProjectServiceResult = {
  // 'created' for a brand-new row; 'reactivated_archived' when an archived row was reused.
  outcome: 'created' | 'reactivated_archived';
  project: ProjectApiView;
};

// Production wiring: real filesystem plus the projects DB.
const defaultDependencies: CreateProjectDependencies = {
  validatePath: validateWorkspacePath,
  ensureWorkspaceDirectory: async (projectPath: string): Promise<void> => {
    await fs.mkdir(projectPath, { recursive: true });
    // mkdir with `recursive: true` succeeds silently on an existing path,
    // so explicitly confirm the path is a directory (not a file).
    const directoryStats = await fs.stat(projectPath);
    if (!directoryStats.isDirectory()) {
      throw new AppError('Path exists but is not a directory', {
        code: 'PROJECT_PATH_NOT_DIRECTORY',
        statusCode: 400,
      });
    }
  },
  persistProjectPath: (projectPath: string, customName: string | null): CreateProjectPathResult =>
    projectsDb.createProjectPath(projectPath, customName),
  getProjectByPath: (projectPath: string): ProjectRepositoryRow | null =>
    projectsDb.getProjectPath(projectPath),
};
|
||||
|
||||
function resolveDisplayName(customName: string | null | undefined, projectPath: string): string {
|
||||
const trimmedCustomName = typeof customName === 'string' ? customName.trim() : '';
|
||||
if (trimmedCustomName.length > 0) {
|
||||
return trimmedCustomName;
|
||||
}
|
||||
|
||||
return path.basename(projectPath) || projectPath;
|
||||
}
|
||||
|
||||
function mapProjectRowToApiView(projectRow: ProjectRepositoryRow): ProjectApiView {
|
||||
return {
|
||||
projectId: projectRow.project_id,
|
||||
path: projectRow.project_path,
|
||||
fullPath: projectRow.project_path,
|
||||
displayName: resolveDisplayName(projectRow.custom_project_name, projectRow.project_path),
|
||||
customName: projectRow.custom_project_name,
|
||||
isArchived: Boolean(projectRow.isArchived),
|
||||
isStarred: Boolean(projectRow.isStarred),
|
||||
sessions: [],
|
||||
cursorSessions: [],
|
||||
codexSessions: [],
|
||||
geminiSessions: [],
|
||||
sessionMeta: {
|
||||
hasMore: false,
|
||||
total: 0,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Creates (or reactivates) a project rooted at `input.projectPath`.
 *
 * Pipeline: normalize the path → validate it against the workspace root →
 * ensure the directory exists on disk → persist the row → map it to the API view.
 *
 * @throws AppError PROJECT_PATH_REQUIRED (400) for an empty path.
 * @throws AppError INVALID_PROJECT_PATH (400) when workspace validation fails.
 * @throws AppError PROJECT_ALREADY_EXISTS (409) when an active row already uses the path.
 * @throws AppError PROJECT_CREATE_FAILED (500) when the row cannot be re-read after persisting.
 */
export async function createProject(
  input: CreateProjectInput,
  dependencies: CreateProjectDependencies = defaultDependencies,
): Promise<CreateProjectServiceResult> {
  const normalizedPath = normalizeProjectPath(input.projectPath || '');
  if (!normalizedPath) {
    throw new AppError('path is required', {
      code: 'PROJECT_PATH_REQUIRED',
      statusCode: 400,
    });
  }

  const pathValidation = await dependencies.validatePath(normalizedPath);
  if (!pathValidation.valid || !pathValidation.resolvedPath) {
    throw new AppError('Invalid project path', {
      code: 'INVALID_PROJECT_PATH',
      statusCode: 400,
      details: pathValidation.error ?? 'Path validation failed',
    });
  }

  // Re-normalize because validation may have resolved symlinks / relative parts.
  const resolvedProjectPath = normalizeProjectPath(pathValidation.resolvedPath);
  await dependencies.ensureWorkspaceDirectory(resolvedProjectPath);

  // Note: when no custom name is given this stores the derived basename as the custom name.
  const normalizedCustomName = resolveDisplayName(input.customName ?? null, resolvedProjectPath);
  const persistedProject = dependencies.persistProjectPath(resolvedProjectPath, normalizedCustomName);

  if (persistedProject.outcome === 'active_conflict') {
    throw new AppError('Project path already exists and is active', {
      code: 'PROJECT_ALREADY_EXISTS',
      statusCode: 409,
      details: `Project path already exists: ${resolvedProjectPath}`,
    });
  }

  // Some persist outcomes do not carry the row; fall back to a lookup by path.
  const projectRow = persistedProject.project ?? dependencies.getProjectByPath(resolvedProjectPath);
  if (!projectRow) {
    throw new AppError('Failed to resolve project after creation', {
      code: 'PROJECT_CREATE_FAILED',
      statusCode: 500,
    });
  }

  // Archived rows intentionally remain archived when reused, as requested.
  return {
    outcome: persistedProject.outcome,
    project: mapProjectRowToApiView(projectRow),
  };
}
|
||||
|
||||
/**
|
||||
* Sets `projects.custom_project_name` for the given `projectId` (or clears it when empty).
|
||||
*/
|
||||
export function updateProjectDisplayName(projectId: string, newDisplayName: unknown): void {
|
||||
const trimmed = typeof newDisplayName === 'string' ? newDisplayName.trim() : '';
|
||||
projectsDb.updateCustomProjectNameById(projectId, trimmed.length > 0 ? trimmed : null);
|
||||
}
|
||||
78
server/modules/projects/services/project-star.service.ts
Normal file
78
server/modules/projects/services/project-star.service.ts
Normal file
@@ -0,0 +1,78 @@
|
||||
import { projectsDb } from '@/modules/database/index.js';
|
||||
import { AppError } from '@/shared/utils.js';
|
||||
|
||||
type ToggleProjectStarResult = {
|
||||
isStarred: boolean;
|
||||
};
|
||||
|
||||
type ApplyLegacyStarredProjectIdsResult = {
|
||||
updated: number;
|
||||
};
|
||||
|
||||
function normalizeProjectId(projectId: string): string {
|
||||
return projectId.trim();
|
||||
}
|
||||
|
||||
function uniqueProjectIds(projectIds: string[]): string[] {
|
||||
const uniqueIds = new Set<string>();
|
||||
for (const projectId of projectIds) {
|
||||
const normalizedProjectId = normalizeProjectId(projectId);
|
||||
if (!normalizedProjectId) {
|
||||
continue;
|
||||
}
|
||||
uniqueIds.add(normalizedProjectId);
|
||||
}
|
||||
return [...uniqueIds];
|
||||
}
|
||||
|
||||
/**
|
||||
* Applies legacy `localStorage` stars keyed by DB `projectId` onto `projects.isStarred`.
|
||||
*
|
||||
* The operation is idempotent: already-starred projects are ignored, unknown ids are skipped.
|
||||
*/
|
||||
export function applyLegacyStarredProjectIds(projectIds: string[]): ApplyLegacyStarredProjectIdsResult {
|
||||
const normalizedProjectIds = uniqueProjectIds(projectIds);
|
||||
let updated = 0;
|
||||
|
||||
for (const projectId of normalizedProjectIds) {
|
||||
const project = projectsDb.getProjectById(projectId);
|
||||
if (!project) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (Boolean(project.isStarred)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
projectsDb.updateProjectIsStarredById(projectId, true);
|
||||
updated += 1;
|
||||
}
|
||||
|
||||
return { updated };
|
||||
}
|
||||
|
||||
/**
|
||||
* Flips `projects.isStarred` for one project and returns the new state.
|
||||
*/
|
||||
export function toggleProjectStar(projectId: string): ToggleProjectStarResult {
|
||||
const normalizedProjectId = normalizeProjectId(projectId);
|
||||
if (!normalizedProjectId) {
|
||||
throw new AppError('projectId is required', {
|
||||
code: 'PROJECT_ID_REQUIRED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const project = projectsDb.getProjectById(normalizedProjectId);
|
||||
if (!project) {
|
||||
throw new AppError('Project not found', {
|
||||
code: 'PROJECT_NOT_FOUND',
|
||||
statusCode: 404,
|
||||
});
|
||||
}
|
||||
|
||||
const nextStarredState = !Boolean(project.isStarred);
|
||||
projectsDb.updateProjectIsStarredById(normalizedProjectId, nextStarredState);
|
||||
|
||||
return { isStarred: nextStarredState };
|
||||
}
|
||||
@@ -0,0 +1,248 @@
|
||||
import { access, readFile, stat } from 'node:fs/promises';
|
||||
import path from 'node:path';
|
||||
|
||||
import { projectsDb } from '@/modules/database/index.js';
|
||||
import { AppError } from '@/shared/utils.js';
|
||||
|
||||
/** One task entry inside tasks.json (only the fields this module reads). */
type TaskMasterTask = {
  status?: string;
  subtasks?: Array<{
    status?: string;
  }>;
};

/** Aggregated task stats, a parse-failure marker, or null when tasks.json is absent. */
type TaskMasterMetadata =
  | {
      taskCount: number;
      subtaskCount: number;
      completed: number;
      pending: number;
      inProgress: number;
      review: number;
      completionPercentage: number;
      lastModified: string;
    }
  | {
      error: string;
    }
  | null;

/** Raw outcome of probing `<project>/.taskmaster` on disk. */
type TaskMasterDetectionResult = {
  hasTaskmaster: boolean;
  hasEssentialFiles?: boolean;
  files?: Record<string, boolean>;
  metadata?: TaskMasterMetadata;
  path?: string;
  reason?: string;
};

/** Detection result flattened into the shape the API returns. */
type NormalizedTaskMasterInfo = {
  hasTaskmaster: boolean;
  hasEssentialFiles: boolean;
  metadata: TaskMasterMetadata;
  status: 'configured' | 'not-configured';
};

/** Response payload for the per-project TaskMaster endpoints. */
type GetProjectTaskMasterByIdResult = {
  projectId: string;
  projectPath: string;
  taskmaster: NormalizedTaskMasterInfo;
};

/** Injectable collaborators so detection can be unit-tested without FS/DB access. */
type GetProjectTaskMasterDependencies = {
  resolveProjectPathById: (projectId: string) => string | null;
  detectTaskMasterFolder: (projectPath: string) => Promise<TaskMasterDetectionResult>;
};

/** Resolver signature used by getProjectTaskMaster (overridable in tests). */
type GetProjectTaskMasterResolver = (projectId: string) => Promise<GetProjectTaskMasterByIdResult | null>;
|
||||
|
||||
function extractTasksFromJson(tasksData: unknown): TaskMasterTask[] {
|
||||
if (!tasksData || typeof tasksData !== 'object') {
|
||||
return [];
|
||||
}
|
||||
|
||||
const legacyTasks = (tasksData as { tasks?: unknown }).tasks;
|
||||
if (Array.isArray(legacyTasks)) {
|
||||
return legacyTasks as TaskMasterTask[];
|
||||
}
|
||||
|
||||
const taggedTaskCollections: TaskMasterTask[] = [];
|
||||
for (const tagValue of Object.values(tasksData)) {
|
||||
if (!tagValue || typeof tagValue !== 'object') {
|
||||
continue;
|
||||
}
|
||||
|
||||
const tagTasks = (tagValue as { tasks?: unknown }).tasks;
|
||||
if (Array.isArray(tagTasks)) {
|
||||
taggedTaskCollections.push(...(tagTasks as TaskMasterTask[]));
|
||||
}
|
||||
}
|
||||
|
||||
return taggedTaskCollections;
|
||||
}
|
||||
|
||||
/**
 * Probes `<projectPath>/.taskmaster` and summarizes its contents.
 *
 * Reports whether the folder exists, which key files are present, and — when
 * tasks/tasks.json is readable — aggregated task/subtask status counts.
 * Never throws: unexpected errors are logged and returned as a `reason`.
 */
async function detectTaskMasterFolder(projectPath: string): Promise<TaskMasterDetectionResult> {
  try {
    const taskMasterPath = path.join(projectPath, '.taskmaster');

    // Step 1: the .taskmaster entry must exist and be a directory.
    try {
      const taskMasterStats = await stat(taskMasterPath);
      if (!taskMasterStats.isDirectory()) {
        return {
          hasTaskmaster: false,
          reason: '.taskmaster exists but is not a directory',
        };
      }
    } catch (error) {
      const fileError = error as NodeJS.ErrnoException;
      if (fileError.code === 'ENOENT') {
        return {
          hasTaskmaster: false,
          reason: '.taskmaster directory not found',
        };
      }

      // Non-ENOENT stat failures are handled by the outer catch.
      throw fileError;
    }

    // Step 2: check for the key files; only tasks/tasks.json is essential.
    const keyFiles = ['tasks/tasks.json', 'config.json'];
    const fileStatus: Record<string, boolean> = {};
    let hasEssentialFiles = true;

    for (const fileName of keyFiles) {
      const absoluteFilePath = path.join(taskMasterPath, fileName);
      try {
        await access(absoluteFilePath);
        fileStatus[fileName] = true;
      } catch {
        fileStatus[fileName] = false;
        if (fileName === 'tasks/tasks.json') {
          hasEssentialFiles = false;
        }
      }
    }

    // Step 3: when tasks.json is present, aggregate task/subtask status counts.
    let taskMetadata: TaskMasterMetadata = null;
    if (fileStatus['tasks/tasks.json']) {
      const tasksPath = path.join(taskMasterPath, 'tasks/tasks.json');
      try {
        const tasksContent = await readFile(tasksPath, 'utf8');
        const parsedTasksJson = JSON.parse(tasksContent) as unknown;
        const tasks = extractTasksFromJson(parsedTasksJson);

        // Single pass over all tasks, tallying totals and per-status counts.
        const stats = tasks.reduce(
          (accumulator, currentTask) => {
            accumulator.total += 1;
            // Tasks without an explicit status are treated as pending.
            const normalizedTaskStatus = currentTask.status || 'pending';
            accumulator.byStatus[normalizedTaskStatus] = (accumulator.byStatus[normalizedTaskStatus] || 0) + 1;

            if (Array.isArray(currentTask.subtasks)) {
              for (const subtask of currentTask.subtasks) {
                accumulator.subtotalTasks += 1;
                const normalizedSubtaskStatus = subtask.status || 'pending';
                accumulator.subtaskByStatus[normalizedSubtaskStatus] =
                  (accumulator.subtaskByStatus[normalizedSubtaskStatus] || 0) + 1;
              }
            }

            return accumulator;
          },
          {
            total: 0,
            subtotalTasks: 0,
            byStatus: {} as Record<string, number>,
            subtaskByStatus: {} as Record<string, number>,
          },
        );

        const tasksStat = await stat(tasksPath);
        taskMetadata = {
          taskCount: stats.total,
          subtaskCount: stats.subtotalTasks,
          completed: stats.byStatus.done || 0,
          pending: stats.byStatus.pending || 0,
          inProgress: stats.byStatus['in-progress'] || 0,
          review: stats.byStatus.review || 0,
          // Percentage of top-level tasks marked 'done' (subtasks excluded).
          completionPercentage: stats.total > 0 ? Math.round(((stats.byStatus.done || 0) / stats.total) * 100) : 0,
          lastModified: tasksStat.mtime.toISOString(),
        };
      } catch (parseError) {
        // A corrupt tasks.json downgrades metadata to an error marker but does
        // not hide the TaskMaster folder itself.
        console.warn('Failed to parse tasks.json:', (parseError as Error).message);
        taskMetadata = {
          error: 'Failed to parse tasks.json',
        };
      }
    }

    return {
      hasTaskmaster: true,
      hasEssentialFiles,
      files: fileStatus,
      metadata: taskMetadata,
      path: taskMasterPath,
    };
  } catch (error) {
    // Catch-all so callers always get a structured result instead of a throw.
    console.error('Error detecting TaskMaster folder:', error);
    return {
      hasTaskmaster: false,
      reason: `Error checking directory: ${(error as Error).message}`,
    };
  }
}
|
||||
|
||||
function normalizeTaskMasterInfo(taskMasterResult: TaskMasterDetectionResult | null = null): NormalizedTaskMasterInfo {
|
||||
const hasTaskmaster = Boolean(taskMasterResult?.hasTaskmaster);
|
||||
const hasEssentialFiles = Boolean(taskMasterResult?.hasEssentialFiles);
|
||||
|
||||
return {
|
||||
hasTaskmaster,
|
||||
hasEssentialFiles,
|
||||
metadata: taskMasterResult?.metadata ?? null,
|
||||
status: hasTaskmaster && hasEssentialFiles ? 'configured' : 'not-configured',
|
||||
};
|
||||
}
|
||||
|
||||
// Production wiring: resolve paths through the projects DB and probe the real filesystem.
const defaultDependencies: GetProjectTaskMasterDependencies = {
  resolveProjectPathById: (projectId: string): string | null => projectsDb.getProjectPathById(projectId),
  detectTaskMasterFolder,
};
|
||||
|
||||
export async function getProjectTaskMasterById(
|
||||
projectId: string,
|
||||
dependencies: GetProjectTaskMasterDependencies = defaultDependencies,
|
||||
): Promise<GetProjectTaskMasterByIdResult | null> {
|
||||
const projectPath = dependencies.resolveProjectPathById(projectId);
|
||||
if (!projectPath) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const taskMasterResult = await dependencies.detectTaskMasterFolder(projectPath);
|
||||
return {
|
||||
projectId,
|
||||
projectPath,
|
||||
taskmaster: normalizeTaskMasterInfo(taskMasterResult),
|
||||
};
|
||||
}
|
||||
|
||||
export async function getProjectTaskMaster(
|
||||
projectId: string,
|
||||
resolveById: GetProjectTaskMasterResolver = getProjectTaskMasterById,
|
||||
): Promise<GetProjectTaskMasterByIdResult> {
|
||||
const normalizedProjectId = projectId.trim();
|
||||
if (!normalizedProjectId) {
|
||||
throw new AppError('projectId is required', {
|
||||
code: 'PROJECT_ID_REQUIRED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const taskMasterDetails = await resolveById(normalizedProjectId);
|
||||
if (!taskMasterDetails) {
|
||||
throw new AppError('Project not found', {
|
||||
code: 'PROJECT_NOT_FOUND',
|
||||
statusCode: 404,
|
||||
});
|
||||
}
|
||||
|
||||
return taskMasterDetails;
|
||||
}
|
||||
@@ -0,0 +1,285 @@
|
||||
import fs from 'node:fs/promises';
|
||||
import path from 'node:path';
|
||||
|
||||
import { projectsDb, sessionsDb } from '@/modules/database/index.js';
|
||||
import { sessionSynchronizerService } from '@/modules/providers/index.js';
|
||||
import { WS_OPEN_STATE, connectedClients } from '@/modules/websocket/index.js';
|
||||
import type { RealtimeClientConnection } from '@/shared/types.js';
|
||||
import { AppError } from '@/shared/utils.js';
|
||||
|
||||
/** Lightweight session info surfaced in project listings. */
type SessionSummary = {
  id: string;
  summary: string;
  messageCount: number;
  lastActivity: string;
};

/** Session summaries grouped by CLI provider. */
type SessionsByProvider = Record<'claude' | 'cursor' | 'codex' | 'gemini', SessionSummary[]>;

/** Subset of the sessions table columns this module reads. */
type SessionRepositoryRow = {
  provider: string;
  session_id: string;
  custom_name?: string | null;
  updated_at?: string | null;
  created_at?: string | null;
};

/** One project entry in the projects list API response. */
export type ProjectListItem = {
  projectId: string;
  path: string;
  displayName: string;
  fullPath: string;
  isStarred: boolean;
  sessions: SessionSummary[];
  cursorSessions: SessionSummary[];
  codexSessions: SessionSummary[];
  geminiSessions: SessionSummary[];
  sessionMeta: {
    hasMore: boolean;
    total: number;
  };
};

/** Payload broadcast over WebSocket while the project list is being built. */
type ProgressUpdate = {
  phase: 'loading' | 'complete';
  current: number;
  total: number;
  currentProject?: string;
};

type GetProjectsWithSessionsOptions = {
  // When true, skip the provider session sync pass before reading the DB.
  skipSynchronization?: boolean;
  sessionsLimit?: number;
  sessionsOffset?: number;
};

/** Caller-supplied paging knobs; clamped by normalizeSessionPagination. */
type SessionPaginationOptions = {
  limit?: number;
  offset?: number;
};

/** Internal result of reading one session page for a project path. */
type ProjectSessionsPageResult = {
  sessionsByProvider: SessionsByProvider;
  total: number;
  hasMore: boolean;
};

/** API response shape for the per-project session pagination endpoint. */
export type ProjectSessionsPageApiView = {
  projectId: string;
  sessions: SessionSummary[];
  cursorSessions: SessionSummary[];
  codexSessions: SessionSummary[];
  geminiSessions: SessionSummary[];
  sessionMeta: {
    hasMore: boolean;
    total: number;
  };
};

// Default and maximum page sizes for per-project session pagination.
const DEFAULT_PROJECT_SESSIONS_PAGE_SIZE = 20;
const MAX_PROJECT_SESSIONS_PAGE_SIZE = 200;
|
||||
|
||||
/**
|
||||
* Generate better display name from path.
|
||||
*/
|
||||
export async function generateDisplayName(projectName: string, actualProjectDir: string | null = null): Promise<string> {
|
||||
// Use actual project directory if provided, otherwise decode from project name.
|
||||
const projectPath = actualProjectDir || projectName.replace(/-/g, '/');
|
||||
|
||||
// Try to read package.json from the project path.
|
||||
try {
|
||||
const packageJsonPath = path.join(projectPath, 'package.json');
|
||||
const packageData = await fs.readFile(packageJsonPath, 'utf8');
|
||||
const packageJson = JSON.parse(packageData) as { name?: string };
|
||||
|
||||
// Return the name from package.json if it exists.
|
||||
if (packageJson.name) {
|
||||
return packageJson.name;
|
||||
}
|
||||
} catch {
|
||||
// Fall back to path-based naming if package.json doesn't exist or can't be read.
|
||||
}
|
||||
|
||||
// If it starts with /, it's an absolute path.
|
||||
if (projectPath.startsWith('/')) {
|
||||
const parts = projectPath.split('/').filter(Boolean);
|
||||
// Return only the last folder name.
|
||||
return parts[parts.length - 1] || projectPath;
|
||||
}
|
||||
|
||||
return projectPath;
|
||||
}
|
||||
|
||||
function normalizeSessionPagination(options: SessionPaginationOptions = {}): { limit: number; offset: number } {
|
||||
const rawLimit = Number.isFinite(options.limit) ? Math.floor(Number(options.limit)) : DEFAULT_PROJECT_SESSIONS_PAGE_SIZE;
|
||||
const rawOffset = Number.isFinite(options.offset) ? Math.floor(Number(options.offset)) : 0;
|
||||
|
||||
return {
|
||||
limit: Math.min(Math.max(1, rawLimit), MAX_PROJECT_SESSIONS_PAGE_SIZE),
|
||||
offset: Math.max(0, rawOffset),
|
||||
};
|
||||
}
|
||||
|
||||
function mapSessionRowToSummary(row: SessionRepositoryRow): SessionSummary {
|
||||
return {
|
||||
id: row.session_id,
|
||||
summary: row.custom_name || '',
|
||||
messageCount: 0,
|
||||
lastActivity: row.updated_at ?? row.created_at ?? new Date().toISOString(),
|
||||
};
|
||||
}
|
||||
|
||||
function bucketSessionRowsByProvider(rows: SessionRepositoryRow[]): SessionsByProvider {
|
||||
const byProvider: SessionsByProvider = {
|
||||
claude: [],
|
||||
cursor: [],
|
||||
codex: [],
|
||||
gemini: [],
|
||||
};
|
||||
|
||||
for (const row of rows) {
|
||||
const provider = row.provider as keyof SessionsByProvider;
|
||||
const bucket = byProvider[provider];
|
||||
if (!bucket) {
|
||||
continue;
|
||||
}
|
||||
|
||||
bucket.push(mapSessionRowToSummary(row));
|
||||
}
|
||||
|
||||
return byProvider;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads one paginated project session slice from the DB and groups rows by provider.
|
||||
*/
|
||||
function readProjectSessionsPageByPath(
|
||||
projectPath: string,
|
||||
options: SessionPaginationOptions = {},
|
||||
): ProjectSessionsPageResult {
|
||||
const pagination = normalizeSessionPagination(options);
|
||||
const rows = sessionsDb.getSessionsByProjectPathPage(
|
||||
projectPath,
|
||||
pagination.limit,
|
||||
pagination.offset,
|
||||
) as SessionRepositoryRow[];
|
||||
const total = sessionsDb.countSessionsByProjectPath(projectPath);
|
||||
|
||||
return {
|
||||
sessionsByProvider: bucketSessionRowsByProvider(rows),
|
||||
total,
|
||||
hasMore: pagination.offset + rows.length < total,
|
||||
};
|
||||
}
|
||||
|
||||
// Broadcast progress to all connected WebSocket clients
|
||||
function broadcastProgress(progress: ProgressUpdate) {
|
||||
const message = JSON.stringify({
|
||||
type: 'loading_progress',
|
||||
...progress,
|
||||
});
|
||||
|
||||
connectedClients.forEach((client: RealtimeClientConnection) => {
|
||||
if (client.readyState === WS_OPEN_STATE) {
|
||||
client.send(message);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Reads all projects from DB and returns provider-bucketed session summaries.
 *
 * Optionally runs the provider session synchronizer first, and broadcasts
 * per-project loading progress over WebSocket while iterating.
 */
export async function getProjectsWithSessions(
  options: GetProjectsWithSessionsOptions = {}
): Promise<ProjectListItem[]> {
  if (!options.skipSynchronization) {
    await sessionSynchronizerService.synchronizeSessions();
  }

  const projectRows = projectsDb.getProjectPaths() as Array<{
    project_id: string;
    project_path: string;
    custom_project_name?: string | null;
    isStarred?: number;
  }>;
  const totalProjects = projectRows.length;
  const projects: ProjectListItem[] = [];
  let processedProjects = 0;

  for (const row of projectRows) {
    processedProjects += 1;

    const projectId = row.project_id;
    const projectPath = row.project_path;

    // Let connected clients render a progress bar while the list builds.
    broadcastProgress({
      phase: 'loading',
      current: processedProjects,
      total: totalProjects,
      currentProject: projectPath,
    });

    // A non-empty custom name wins; otherwise derive one from package.json / the path.
    const displayName =
      row.custom_project_name && row.custom_project_name.trim().length > 0
        ? row.custom_project_name
        : await generateDisplayName(path.basename(projectPath) || projectPath, projectPath);

    const sessionsPage = readProjectSessionsPageByPath(projectPath, {
      limit: options.sessionsLimit,
      offset: options.sessionsOffset,
    });

    projects.push({
      projectId,
      path: projectPath,
      displayName,
      fullPath: projectPath,
      isStarred: Boolean(row.isStarred),
      sessions: sessionsPage.sessionsByProvider.claude,
      cursorSessions: sessionsPage.sessionsByProvider.cursor,
      codexSessions: sessionsPage.sessionsByProvider.codex,
      geminiSessions: sessionsPage.sessionsByProvider.gemini,
      sessionMeta: {
        hasMore: sessionsPage.hasMore,
        total: sessionsPage.total,
      },
    });
  }

  // Final event so clients can dismiss their loading indicator.
  broadcastProgress({
    phase: 'complete',
    current: totalProjects,
    total: totalProjects,
  });

  return projects;
}
|
||||
|
||||
/**
|
||||
* Loads one paginated session slice for a specific project id.
|
||||
*/
|
||||
export async function getProjectSessionsPage(
|
||||
projectId: string,
|
||||
options: SessionPaginationOptions = {},
|
||||
): Promise<ProjectSessionsPageApiView> {
|
||||
const projectRow = projectsDb.getProjectById(projectId);
|
||||
if (!projectRow) {
|
||||
throw new AppError(`Project "${projectId}" was not found.`, {
|
||||
code: 'PROJECT_NOT_FOUND',
|
||||
statusCode: 404,
|
||||
});
|
||||
}
|
||||
|
||||
const sessionsPage = readProjectSessionsPageByPath(projectRow.project_path, options);
|
||||
return {
|
||||
projectId: projectRow.project_id,
|
||||
sessions: sessionsPage.sessionsByProvider.claude,
|
||||
cursorSessions: sessionsPage.sessionsByProvider.cursor,
|
||||
codexSessions: sessionsPage.sessionsByProvider.codex,
|
||||
geminiSessions: sessionsPage.sessionsByProvider.gemini,
|
||||
sessionMeta: {
|
||||
hasMore: sessionsPage.hasMore,
|
||||
total: sessionsPage.total,
|
||||
},
|
||||
};
|
||||
}
|
||||
183
server/modules/projects/tests/project-clone.service.test.ts
Normal file
183
server/modules/projects/tests/project-clone.service.test.ts
Normal file
@@ -0,0 +1,183 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import { EventEmitter } from 'node:events';
|
||||
import path from 'node:path';
|
||||
import { PassThrough } from 'node:stream';
|
||||
import test from 'node:test';
|
||||
|
||||
import { startCloneProject } from '@/modules/projects/services/project-clone.service.js';
|
||||
import { AppError } from '@/shared/utils.js';
|
||||
|
||||
type TestDependencies = Parameters<typeof startCloneProject>[2];
|
||||
|
||||
function buildDependencies(overrides: Partial<NonNullable<TestDependencies>> = {}): NonNullable<TestDependencies> {
|
||||
return {
|
||||
validatePath: async () => ({ valid: true, resolvedPath: '/workspace/root' }),
|
||||
ensureDirectory: async () => undefined,
|
||||
pathExists: async () => false,
|
||||
removePath: async () => undefined,
|
||||
getGithubTokenById: async () => ({ github_token: 'token-value' }),
|
||||
spawnGitClone: () => {
|
||||
throw new Error('spawnGitClone should be overridden in this test');
|
||||
},
|
||||
registerProject: async () => ({ project: { projectId: 'project-1' } }),
|
||||
logError: () => undefined,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function createMockGitProcess() {
|
||||
const emitter = new EventEmitter() as EventEmitter & {
|
||||
stdout: PassThrough;
|
||||
stderr: PassThrough;
|
||||
kill: () => void;
|
||||
};
|
||||
|
||||
emitter.stdout = new PassThrough();
|
||||
emitter.stderr = new PassThrough();
|
||||
emitter.kill = () => {
|
||||
emitter.emit('close', null);
|
||||
};
|
||||
|
||||
return emitter;
|
||||
}
|
||||
|
||||
// Input validation: an empty workspacePath must be rejected before any git work starts.
test('startCloneProject rejects when workspace path is missing', async () => {
  await assert.rejects(
    async () =>
      startCloneProject(
        {
          workspacePath: '',
          githubUrl: 'https://github.com/example/repo',
          userId: 1,
        },
        {
          onProgress: () => undefined,
          onComplete: () => undefined,
        },
        buildDependencies(),
      ),
    (error: unknown) => {
      assert.ok(error instanceof AppError);
      assert.equal(error.code, 'WORKSPACE_PATH_REQUIRED');
      return true;
    },
  );
});

// Input validation: an empty githubUrl must be rejected before any git work starts.
test('startCloneProject rejects when github URL is missing', async () => {
  await assert.rejects(
    async () =>
      startCloneProject(
        {
          workspacePath: '/workspace/root',
          githubUrl: '',
          userId: 1,
        },
        {
          onProgress: () => undefined,
          onComplete: () => undefined,
        },
        buildDependencies(),
      ),
    (error: unknown) => {
      assert.ok(error instanceof AppError);
      assert.equal(error.code, 'GITHUB_URL_REQUIRED');
      return true;
    },
  );
});

// Security: URLs starting with '-' could be parsed as git CLI options
// (argument injection, e.g. --upload-pack) and must be rejected outright.
test('startCloneProject rejects github URL values that begin with option prefixes', async () => {
  await assert.rejects(
    async () =>
      startCloneProject(
        {
          workspacePath: '/workspace/root',
          githubUrl: '--upload-pack=malicious',
          userId: 1,
        },
        {
          onProgress: () => undefined,
          onComplete: () => undefined,
        },
        buildDependencies(),
      ),
    (error: unknown) => {
      assert.ok(error instanceof AppError);
      assert.equal(error.code, 'INVALID_GITHUB_URL');
      return true;
    },
  );
});
|
||||
|
||||
test('startCloneProject rejects when selected github token does not exist', async () => {
|
||||
await assert.rejects(
|
||||
async () =>
|
||||
startCloneProject(
|
||||
{
|
||||
workspacePath: '/workspace/root',
|
||||
githubUrl: 'https://github.com/example/repo',
|
||||
githubTokenId: 12,
|
||||
userId: 1,
|
||||
},
|
||||
{
|
||||
onProgress: () => undefined,
|
||||
onComplete: () => undefined,
|
||||
},
|
||||
buildDependencies({
|
||||
getGithubTokenById: async () => null,
|
||||
}),
|
||||
),
|
||||
(error: unknown) => {
|
||||
assert.ok(error instanceof AppError);
|
||||
assert.equal(error.code, 'GITHUB_TOKEN_NOT_FOUND');
|
||||
return true;
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
test('startCloneProject completes and emits complete payload when git exits successfully', async () => {
|
||||
const gitProcess = createMockGitProcess();
|
||||
const progressMessages: string[] = [];
|
||||
let completePayload: { project: Record<string, unknown>; message: string } | null = null;
|
||||
let capturedProjectPath = '';
|
||||
let capturedCustomName = '';
|
||||
|
||||
const operation = await startCloneProject(
|
||||
{
|
||||
workspacePath: '/workspace/root',
|
||||
githubUrl: 'https://github.com/example/repo.git',
|
||||
userId: 1,
|
||||
},
|
||||
{
|
||||
onProgress: (message) => {
|
||||
progressMessages.push(message);
|
||||
},
|
||||
onComplete: (payload: { project: Record<string, unknown>; message: string }) => {
|
||||
completePayload = payload;
|
||||
},
|
||||
},
|
||||
buildDependencies({
|
||||
spawnGitClone: () => gitProcess as any,
|
||||
registerProject: async (projectPath, customName) => {
|
||||
capturedProjectPath = projectPath;
|
||||
capturedCustomName = customName;
|
||||
return { project: { projectId: 'project-1', path: projectPath } };
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
gitProcess.emit('close', 0);
|
||||
await operation.waitForCompletion;
|
||||
|
||||
assert.ok(progressMessages.some((message) => message.includes("Cloning into 'repo'")));
|
||||
assert.equal(capturedCustomName, 'repo');
|
||||
assert.equal(path.basename(capturedProjectPath), 'repo');
|
||||
assert.notEqual(completePayload, null);
|
||||
const resolvedCompletePayload = completePayload as unknown as {
|
||||
project: Record<string, unknown>;
|
||||
message: string;
|
||||
};
|
||||
assert.equal(resolvedCompletePayload.message, 'Repository cloned successfully');
|
||||
assert.equal((resolvedCompletePayload.project.projectId as string) || '', 'project-1');
|
||||
});
|
||||
117
server/modules/projects/tests/project-management.service.test.ts
Normal file
117
server/modules/projects/tests/project-management.service.test.ts
Normal file
@@ -0,0 +1,117 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import test from 'node:test';
|
||||
|
||||
import { createProject } from '@/modules/projects/services/project-management.service.js';
|
||||
import { AppError } from '@/shared/utils.js';
|
||||
|
||||
const projectRow = {
|
||||
project_id: 'project-1',
|
||||
project_path: '/workspace/my-project',
|
||||
custom_project_name: 'my-project',
|
||||
isStarred: 0,
|
||||
isArchived: 0,
|
||||
};
|
||||
|
||||
test('createProject throws when project path is missing', async () => {
|
||||
await assert.rejects(
|
||||
async () => createProject({ projectPath: '' }),
|
||||
(error: unknown) => {
|
||||
assert.ok(error instanceof AppError);
|
||||
assert.equal(error.code, 'PROJECT_PATH_REQUIRED');
|
||||
assert.equal(error.statusCode, 400);
|
||||
return true;
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
test('createProject throws when path validation fails', async () => {
|
||||
await assert.rejects(
|
||||
async () =>
|
||||
createProject(
|
||||
{ projectPath: '/invalid/path' },
|
||||
{
|
||||
validatePath: async () => ({ valid: false, error: 'blocked path' }),
|
||||
ensureWorkspaceDirectory: async () => undefined,
|
||||
persistProjectPath: () => ({ outcome: 'created', project: projectRow }),
|
||||
getProjectByPath: () => projectRow,
|
||||
},
|
||||
),
|
||||
(error: unknown) => {
|
||||
assert.ok(error instanceof AppError);
|
||||
assert.equal(error.code, 'INVALID_PROJECT_PATH');
|
||||
assert.equal(error.statusCode, 400);
|
||||
assert.equal(error.details, 'blocked path');
|
||||
return true;
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
test('createProject throws conflict when active project path already exists', async () => {
|
||||
await assert.rejects(
|
||||
async () =>
|
||||
createProject(
|
||||
{ projectPath: '/workspace/my-project' },
|
||||
{
|
||||
validatePath: async () => ({ valid: true, resolvedPath: '/workspace/my-project' }),
|
||||
ensureWorkspaceDirectory: async () => undefined,
|
||||
persistProjectPath: () => ({ outcome: 'active_conflict', project: projectRow }),
|
||||
getProjectByPath: () => projectRow,
|
||||
},
|
||||
),
|
||||
(error: unknown) => {
|
||||
assert.ok(error instanceof AppError);
|
||||
assert.equal(error.code, 'PROJECT_ALREADY_EXISTS');
|
||||
assert.equal(error.statusCode, 409);
|
||||
assert.equal(error.details, 'Project path already exists: /workspace/my-project');
|
||||
return true;
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
test('createProject falls back to directory name when custom name is not provided', async () => {
|
||||
let capturedCustomName: string | null = null;
|
||||
|
||||
const result = await createProject(
|
||||
{ projectPath: '/workspace/my-project', customName: '' },
|
||||
{
|
||||
validatePath: async () => ({ valid: true, resolvedPath: '/workspace/my-project' }),
|
||||
ensureWorkspaceDirectory: async () => undefined,
|
||||
persistProjectPath: (_projectPath, customName) => {
|
||||
capturedCustomName = customName;
|
||||
return {
|
||||
outcome: 'created',
|
||||
project: {
|
||||
...projectRow,
|
||||
custom_project_name: customName,
|
||||
},
|
||||
};
|
||||
},
|
||||
getProjectByPath: () => projectRow,
|
||||
},
|
||||
);
|
||||
|
||||
assert.equal(capturedCustomName, 'my-project');
|
||||
assert.equal(result.outcome, 'created');
|
||||
assert.equal(result.project.displayName, 'my-project');
|
||||
});
|
||||
|
||||
test('createProject returns archived reuse outcome when archived row is reused', async () => {
|
||||
const result = await createProject(
|
||||
{ projectPath: '/workspace/my-project' },
|
||||
{
|
||||
validatePath: async () => ({ valid: true, resolvedPath: '/workspace/my-project' }),
|
||||
ensureWorkspaceDirectory: async () => undefined,
|
||||
persistProjectPath: () => ({
|
||||
outcome: 'reactivated_archived',
|
||||
project: {
|
||||
...projectRow,
|
||||
isArchived: 1,
|
||||
},
|
||||
}),
|
||||
getProjectByPath: () => projectRow,
|
||||
},
|
||||
);
|
||||
|
||||
assert.equal(result.outcome, 'reactivated_archived');
|
||||
assert.equal(result.project.isArchived, true);
|
||||
});
|
||||
123
server/modules/projects/tests/project-star.service.test.ts
Normal file
123
server/modules/projects/tests/project-star.service.test.ts
Normal file
@@ -0,0 +1,123 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import test from 'node:test';
|
||||
|
||||
import { projectsDb } from '@/modules/database/index.js';
|
||||
import { applyLegacyStarredProjectIds, toggleProjectStar } from '@/modules/projects/services/project-star.service.js';
|
||||
import { AppError } from '@/shared/utils.js';
|
||||
|
||||
type ProjectRow = {
|
||||
project_id: string;
|
||||
project_path: string;
|
||||
custom_project_name: string | null;
|
||||
isStarred: number;
|
||||
isArchived: number;
|
||||
};
|
||||
|
||||
test('toggleProjectStar throws when projectId is missing', () => {
|
||||
assert.throws(
|
||||
() => toggleProjectStar(' '),
|
||||
(error: unknown) =>
|
||||
error instanceof AppError
|
||||
&& error.code === 'PROJECT_ID_REQUIRED'
|
||||
&& error.statusCode === 400,
|
||||
);
|
||||
});
|
||||
|
||||
test('toggleProjectStar throws when project does not exist', () => {
|
||||
const originalGetProjectById = projectsDb.getProjectById;
|
||||
try {
|
||||
projectsDb.getProjectById = () => null;
|
||||
assert.throws(
|
||||
() => toggleProjectStar('project-1'),
|
||||
(error: unknown) =>
|
||||
error instanceof AppError
|
||||
&& error.code === 'PROJECT_NOT_FOUND'
|
||||
&& error.statusCode === 404,
|
||||
);
|
||||
} finally {
|
||||
projectsDb.getProjectById = originalGetProjectById;
|
||||
}
|
||||
});
|
||||
|
||||
test('toggleProjectStar flips star state and persists it', () => {
|
||||
const originalGetProjectById = projectsDb.getProjectById;
|
||||
const originalUpdateProjectIsStarredById = projectsDb.updateProjectIsStarredById;
|
||||
|
||||
let capturedProjectId = '';
|
||||
let capturedState = false;
|
||||
|
||||
try {
|
||||
projectsDb.getProjectById = () =>
|
||||
({
|
||||
project_id: 'project-1',
|
||||
project_path: '/workspace/project-1',
|
||||
custom_project_name: 'project-1',
|
||||
isStarred: 0,
|
||||
isArchived: 0,
|
||||
}) as ProjectRow;
|
||||
projectsDb.updateProjectIsStarredById = (projectId: string, isStarred: boolean) => {
|
||||
capturedProjectId = projectId;
|
||||
capturedState = isStarred;
|
||||
};
|
||||
|
||||
const result = toggleProjectStar('project-1');
|
||||
|
||||
assert.equal(result.isStarred, true);
|
||||
assert.equal(capturedProjectId, 'project-1');
|
||||
assert.equal(capturedState, true);
|
||||
} finally {
|
||||
projectsDb.getProjectById = originalGetProjectById;
|
||||
projectsDb.updateProjectIsStarredById = originalUpdateProjectIsStarredById;
|
||||
}
|
||||
});
|
||||
|
||||
test('applyLegacyStarredProjectIds stars only valid, unstarred projects', () => {
|
||||
const originalGetProjectById = projectsDb.getProjectById;
|
||||
const originalUpdateProjectIsStarredById = projectsDb.updateProjectIsStarredById;
|
||||
|
||||
const updatedProjectIds: string[] = [];
|
||||
|
||||
try {
|
||||
projectsDb.getProjectById = (projectId: string) => {
|
||||
if (projectId === 'project-a') {
|
||||
return {
|
||||
project_id: 'project-a',
|
||||
project_path: '/workspace/project-a',
|
||||
custom_project_name: 'A',
|
||||
isStarred: 0,
|
||||
isArchived: 0,
|
||||
} as ProjectRow;
|
||||
}
|
||||
|
||||
if (projectId === 'project-b') {
|
||||
return {
|
||||
project_id: 'project-b',
|
||||
project_path: '/workspace/project-b',
|
||||
custom_project_name: 'B',
|
||||
isStarred: 1,
|
||||
isArchived: 0,
|
||||
} as ProjectRow;
|
||||
}
|
||||
|
||||
return null;
|
||||
};
|
||||
projectsDb.updateProjectIsStarredById = (projectId: string) => {
|
||||
updatedProjectIds.push(projectId);
|
||||
};
|
||||
|
||||
const result = applyLegacyStarredProjectIds([
|
||||
'project-a',
|
||||
'project-b',
|
||||
'missing-project',
|
||||
'project-a',
|
||||
'',
|
||||
' ',
|
||||
]);
|
||||
|
||||
assert.equal(result.updated, 1);
|
||||
assert.deepEqual(updatedProjectIds, ['project-a']);
|
||||
} finally {
|
||||
projectsDb.getProjectById = originalGetProjectById;
|
||||
projectsDb.updateProjectIsStarredById = originalUpdateProjectIsStarredById;
|
||||
}
|
||||
});
|
||||
@@ -0,0 +1,105 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import test from 'node:test';
|
||||
|
||||
import {
|
||||
getProjectTaskMaster,
|
||||
getProjectTaskMasterById,
|
||||
} from '@/modules/projects/services/projects-has-taskmaster.service.js';
|
||||
import { AppError } from '@/shared/utils.js';
|
||||
|
||||
test('getProjectTaskMasterById returns null when project path is missing', async () => {
|
||||
const result = await getProjectTaskMasterById('project-1', {
|
||||
resolveProjectPathById: () => null,
|
||||
detectTaskMasterFolder: async () => {
|
||||
throw new Error('detectTaskMasterFolder should not be called when path is missing');
|
||||
},
|
||||
});
|
||||
|
||||
assert.equal(result, null);
|
||||
});
|
||||
|
||||
test('getProjectTaskMasterById returns configured status when taskmaster exists with essential files', async () => {
|
||||
const result = await getProjectTaskMasterById('project-1', {
|
||||
resolveProjectPathById: () => '/workspace/project-1',
|
||||
detectTaskMasterFolder: async () => ({
|
||||
hasTaskmaster: true,
|
||||
hasEssentialFiles: true,
|
||||
metadata: {
|
||||
taskCount: 3,
|
||||
subtaskCount: 0,
|
||||
completed: 1,
|
||||
pending: 2,
|
||||
inProgress: 0,
|
||||
review: 0,
|
||||
completionPercentage: 33,
|
||||
lastModified: '2026-01-01T00:00:00.000Z',
|
||||
},
|
||||
}),
|
||||
});
|
||||
|
||||
assert.ok(result);
|
||||
assert.equal(result.projectId, 'project-1');
|
||||
assert.equal(result.projectPath, '/workspace/project-1');
|
||||
assert.equal(result.taskmaster.hasTaskmaster, true);
|
||||
assert.equal(result.taskmaster.hasEssentialFiles, true);
|
||||
assert.equal(result.taskmaster.status, 'configured');
|
||||
assert.deepEqual(result.taskmaster.metadata, {
|
||||
taskCount: 3,
|
||||
subtaskCount: 0,
|
||||
completed: 1,
|
||||
pending: 2,
|
||||
inProgress: 0,
|
||||
review: 0,
|
||||
completionPercentage: 33,
|
||||
lastModified: '2026-01-01T00:00:00.000Z',
|
||||
});
|
||||
});
|
||||
|
||||
test('getProjectTaskMasterById returns not-configured status when taskmaster is missing', async () => {
|
||||
const result = await getProjectTaskMasterById('project-1', {
|
||||
resolveProjectPathById: () => '/workspace/project-1',
|
||||
detectTaskMasterFolder: async () => ({
|
||||
hasTaskmaster: false,
|
||||
}),
|
||||
});
|
||||
|
||||
assert.ok(result);
|
||||
assert.equal(result.taskmaster.hasTaskmaster, false);
|
||||
assert.equal(result.taskmaster.hasEssentialFiles, false);
|
||||
assert.equal(result.taskmaster.status, 'not-configured');
|
||||
assert.equal(result.taskmaster.metadata, null);
|
||||
});
|
||||
|
||||
test('getProjectTaskMaster throws when project id is missing', async () => {
|
||||
await assert.rejects(
|
||||
async () =>
|
||||
getProjectTaskMaster('', async () => ({
|
||||
projectId: 'project-1',
|
||||
projectPath: '/workspace/project-1',
|
||||
taskmaster: {
|
||||
hasTaskmaster: true,
|
||||
hasEssentialFiles: true,
|
||||
metadata: null,
|
||||
status: 'configured',
|
||||
},
|
||||
})),
|
||||
(error: unknown) => {
|
||||
assert.ok(error instanceof AppError);
|
||||
assert.equal(error.code, 'PROJECT_ID_REQUIRED');
|
||||
assert.equal(error.statusCode, 400);
|
||||
return true;
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
test('getProjectTaskMaster throws when project does not exist', async () => {
|
||||
await assert.rejects(
|
||||
async () => getProjectTaskMaster('project-that-does-not-exist', async () => null),
|
||||
(error: unknown) => {
|
||||
assert.ok(error instanceof AppError);
|
||||
assert.equal(error.code, 'PROJECT_NOT_FOUND');
|
||||
assert.equal(error.statusCode, 404);
|
||||
return true;
|
||||
},
|
||||
);
|
||||
});
|
||||
4
server/modules/providers/index.ts
Normal file
4
server/modules/providers/index.ts
Normal file
@@ -0,0 +1,4 @@
|
||||
export { sessionSynchronizerService } from './services/session-synchronizer.service.js';
|
||||
|
||||
export { initializeSessionsWatcher } from './services/sessions-watcher.service.js';
|
||||
export { closeSessionsWatcher } from './services/sessions-watcher.service.js';
|
||||
@@ -0,0 +1,110 @@
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
|
||||
import { sessionsDb } from '@/modules/database/index.js';
|
||||
import {
|
||||
buildLookupMap,
|
||||
extractFirstValidJsonlData,
|
||||
findFilesRecursivelyCreatedAfter,
|
||||
normalizeSessionName,
|
||||
readFileTimestamps,
|
||||
} from '@/shared/utils.js';
|
||||
import type { IProviderSessionSynchronizer } from '@/shared/interfaces.js';
|
||||
|
||||
type ParsedSession = {
|
||||
sessionId: string;
|
||||
projectPath: string;
|
||||
sessionName?: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* Session indexer for Claude transcript artifacts.
|
||||
*/
|
||||
export class ClaudeSessionSynchronizer implements IProviderSessionSynchronizer {
|
||||
private readonly provider = 'claude' as const;
|
||||
private readonly claudeHome = path.join(os.homedir(), '.claude');
|
||||
|
||||
/**
|
||||
* Scans ~/.claude/projects and upserts discovered sessions into DB.
|
||||
*/
|
||||
async synchronize(since?: Date): Promise<number> {
|
||||
const nameMap = await buildLookupMap(path.join(this.claudeHome, 'history.jsonl'), 'sessionId', 'display');
|
||||
const files = await findFilesRecursivelyCreatedAfter(
|
||||
path.join(this.claudeHome, 'projects'),
|
||||
'.jsonl',
|
||||
since ?? null
|
||||
);
|
||||
|
||||
let processed = 0;
|
||||
for (const filePath of files) {
|
||||
const parsed = await this.processSessionFile(filePath, nameMap);
|
||||
if (!parsed) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const timestamps = await readFileTimestamps(filePath);
|
||||
sessionsDb.createSession(
|
||||
parsed.sessionId,
|
||||
this.provider,
|
||||
parsed.projectPath,
|
||||
parsed.sessionName,
|
||||
timestamps.createdAt,
|
||||
timestamps.updatedAt,
|
||||
filePath
|
||||
);
|
||||
processed += 1;
|
||||
}
|
||||
|
||||
return processed;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses and upserts one Claude session JSONL file.
|
||||
*/
|
||||
async synchronizeFile(filePath: string): Promise<string | null> {
|
||||
if (!filePath.endsWith('.jsonl')) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const nameMap = await buildLookupMap(path.join(this.claudeHome, 'history.jsonl'), 'sessionId', 'display');
|
||||
const parsed = await this.processSessionFile(filePath, nameMap);
|
||||
if (!parsed) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const timestamps = await readFileTimestamps(filePath);
|
||||
return sessionsDb.createSession(
|
||||
parsed.sessionId,
|
||||
this.provider,
|
||||
parsed.projectPath,
|
||||
parsed.sessionName,
|
||||
timestamps.createdAt,
|
||||
timestamps.updatedAt,
|
||||
filePath
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts session metadata from one Claude JSONL session file.
|
||||
*/
|
||||
private async processSessionFile(
|
||||
filePath: string,
|
||||
nameMap: Map<string, string>
|
||||
): Promise<ParsedSession | null> {
|
||||
return extractFirstValidJsonlData(filePath, (rawData) => {
|
||||
const data = rawData as Record<string, unknown>;
|
||||
const sessionId = typeof data.sessionId === 'string' ? data.sessionId : undefined;
|
||||
const projectPath = typeof data.cwd === 'string' ? data.cwd : undefined;
|
||||
|
||||
if (!sessionId || !projectPath) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
sessionId,
|
||||
projectPath,
|
||||
sessionName: normalizeSessionName(nameMap.get(sessionId), 'Untitled Claude Session'),
|
||||
};
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,12 @@
|
||||
import { getSessionMessages } from '@/projects.js';
|
||||
import fs from 'node:fs';
|
||||
import fsp from 'node:fs/promises';
|
||||
import path from 'node:path';
|
||||
import readline from 'node:readline';
|
||||
|
||||
import type { IProviderSessions } from '@/shared/interfaces.js';
|
||||
import type { AnyRecord, FetchHistoryOptions, FetchHistoryResult, NormalizedMessage } from '@/shared/types.js';
|
||||
import { createNormalizedMessage, generateMessageId, readObjectRecord } from '@/shared/utils.js';
|
||||
import { sessionsDb } from '@/modules/database/index.js';
|
||||
|
||||
const PROVIDER = 'claude';
|
||||
|
||||
@@ -15,17 +20,184 @@ type ClaudeToolResult = {
|
||||
type ClaudeHistoryResult =
|
||||
| AnyRecord[]
|
||||
| {
|
||||
messages?: AnyRecord[];
|
||||
total?: number;
|
||||
hasMore?: boolean;
|
||||
};
|
||||
messages?: AnyRecord[];
|
||||
total?: number;
|
||||
hasMore?: boolean;
|
||||
};
|
||||
|
||||
const loadClaudeSessionMessages = getSessionMessages as unknown as (
|
||||
projectName: string,
|
||||
type ClaudeHistoryMessagesResult =
|
||||
| AnyRecord[]
|
||||
| {
|
||||
messages: AnyRecord[];
|
||||
total: number;
|
||||
hasMore: boolean;
|
||||
offset?: number;
|
||||
limit?: number | null;
|
||||
};
|
||||
|
||||
async function parseAgentTools(filePath: string): Promise<AnyRecord[]> {
|
||||
const tools: AnyRecord[] = [];
|
||||
|
||||
try {
|
||||
const fileStream = fs.createReadStream(filePath);
|
||||
const rl = readline.createInterface({
|
||||
input: fileStream,
|
||||
crlfDelay: Infinity,
|
||||
});
|
||||
|
||||
for await (const line of rl) {
|
||||
if (!line.trim()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
const entry = JSON.parse(line) as AnyRecord;
|
||||
|
||||
if (entry.message?.role === 'assistant' && Array.isArray(entry.message?.content)) {
|
||||
for (const part of entry.message.content as AnyRecord[]) {
|
||||
if (part.type === 'tool_use') {
|
||||
tools.push({
|
||||
toolId: part.id,
|
||||
toolName: part.name,
|
||||
toolInput: part.input,
|
||||
timestamp: entry.timestamp,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (entry.message?.role === 'user' && Array.isArray(entry.message?.content)) {
|
||||
for (const part of entry.message.content as AnyRecord[]) {
|
||||
if (part.type !== 'tool_result') {
|
||||
continue;
|
||||
}
|
||||
|
||||
const tool = tools.find((candidate) => candidate.toolId === part.tool_use_id);
|
||||
if (!tool) {
|
||||
continue;
|
||||
}
|
||||
|
||||
tool.toolResult = {
|
||||
content: typeof part.content === 'string'
|
||||
? part.content
|
||||
: Array.isArray(part.content)
|
||||
? part.content
|
||||
.map((contentPart: AnyRecord) => contentPart?.text || '')
|
||||
.join('\n')
|
||||
: JSON.stringify(part.content),
|
||||
isError: Boolean(part.is_error),
|
||||
};
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Skip malformed lines that can happen during concurrent writes.
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
console.warn(`Error parsing agent file ${filePath}:`, message);
|
||||
}
|
||||
|
||||
return tools;
|
||||
}
|
||||
|
||||
async function getSessionMessages(
|
||||
sessionId: string,
|
||||
limit: number | null,
|
||||
offset: number,
|
||||
) => Promise<ClaudeHistoryResult>;
|
||||
): Promise<ClaudeHistoryMessagesResult> {
|
||||
try {
|
||||
const jsonLPath = sessionsDb.getSessionById(sessionId)?.jsonl_path;
|
||||
|
||||
if (!jsonLPath) {
|
||||
return { messages: [], total: 0, hasMore: false };
|
||||
}
|
||||
|
||||
const projectDir = path.dirname(jsonLPath);
|
||||
const files = await fsp.readdir(projectDir);
|
||||
const agentFiles = files.filter((file) => file.endsWith('.jsonl') && file.startsWith('agent-'));
|
||||
|
||||
const messages: AnyRecord[] = [];
|
||||
const agentToolsCache = new Map<string, AnyRecord[]>();
|
||||
|
||||
const fileStream = fs.createReadStream(jsonLPath);
|
||||
const rl = readline.createInterface({
|
||||
input: fileStream,
|
||||
crlfDelay: Infinity,
|
||||
});
|
||||
|
||||
for await (const line of rl) {
|
||||
if (!line.trim()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
const entry = JSON.parse(line) as AnyRecord;
|
||||
if (entry.sessionId === sessionId) {
|
||||
messages.push(entry);
|
||||
}
|
||||
} catch {
|
||||
// Skip malformed JSONL lines that can happen during concurrent writes.
|
||||
}
|
||||
}
|
||||
|
||||
const agentIds = new Set<string>();
|
||||
for (const message of messages) {
|
||||
const agentId = message.toolUseResult?.agentId;
|
||||
if (agentId) {
|
||||
agentIds.add(String(agentId));
|
||||
}
|
||||
}
|
||||
|
||||
for (const agentId of agentIds) {
|
||||
const agentFileName = `agent-${agentId}.jsonl`;
|
||||
if (!agentFiles.includes(agentFileName)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const agentFilePath = path.join(projectDir, agentFileName);
|
||||
const tools = await parseAgentTools(agentFilePath);
|
||||
agentToolsCache.set(agentId, tools);
|
||||
}
|
||||
|
||||
for (const message of messages) {
|
||||
const agentId = message.toolUseResult?.agentId;
|
||||
if (!agentId) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const agentTools = agentToolsCache.get(String(agentId));
|
||||
if (agentTools && agentTools.length > 0) {
|
||||
message.subagentTools = agentTools;
|
||||
}
|
||||
}
|
||||
|
||||
const sortedMessages = messages.sort(
|
||||
(a, b) => new Date(a.timestamp || 0).getTime() - new Date(b.timestamp || 0).getTime(),
|
||||
);
|
||||
const total = sortedMessages.length;
|
||||
|
||||
if (limit === null) {
|
||||
return sortedMessages;
|
||||
}
|
||||
|
||||
const startIndex = Math.max(0, total - offset - limit);
|
||||
const endIndex = total - offset;
|
||||
const paginatedMessages = sortedMessages.slice(startIndex, endIndex);
|
||||
const hasMore = startIndex > 0;
|
||||
|
||||
return {
|
||||
messages: paginatedMessages,
|
||||
total,
|
||||
hasMore,
|
||||
offset,
|
||||
limit,
|
||||
};
|
||||
} catch (error) {
|
||||
console.error(`Error reading messages for session ${sessionId}:`, error);
|
||||
return limit === null ? [] : { messages: [], total: 0, hasMore: false };
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Claude writes internal command and system reminder entries into history.
|
||||
@@ -238,14 +410,11 @@ export class ClaudeSessionsProvider implements IProviderSessions {
|
||||
sessionId: string,
|
||||
options: FetchHistoryOptions = {},
|
||||
): Promise<FetchHistoryResult> {
|
||||
const { projectName, limit = null, offset = 0 } = options;
|
||||
if (!projectName) {
|
||||
return { messages: [], total: 0, hasMore: false, offset: 0, limit: null };
|
||||
}
|
||||
const { limit = null, offset = 0 } = options;
|
||||
|
||||
let result: ClaudeHistoryResult;
|
||||
try {
|
||||
result = await loadClaudeSessionMessages(projectName, sessionId, limit, offset);
|
||||
result = await getSessionMessages(sessionId, limit, offset);
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
console.warn(`[ClaudeProvider] Failed to load session ${sessionId}:`, message);
|
||||
|
||||
@@ -1,13 +1,15 @@
|
||||
import { AbstractProvider } from '@/modules/providers/shared/base/abstract.provider.js';
|
||||
import { ClaudeProviderAuth } from '@/modules/providers/list/claude/claude-auth.provider.js';
|
||||
import { ClaudeMcpProvider } from '@/modules/providers/list/claude/claude-mcp.provider.js';
|
||||
import { ClaudeSessionSynchronizer } from '@/modules/providers/list/claude/claude-session-synchronizer.provider.js';
|
||||
import { ClaudeSessionsProvider } from '@/modules/providers/list/claude/claude-sessions.provider.js';
|
||||
import type { IProviderAuth, IProviderSessions } from '@/shared/interfaces.js';
|
||||
import type { IProviderAuth, IProviderSessionSynchronizer, IProviderSessions } from '@/shared/interfaces.js';
|
||||
|
||||
export class ClaudeProvider extends AbstractProvider {
|
||||
readonly mcp = new ClaudeMcpProvider();
|
||||
readonly auth: IProviderAuth = new ClaudeProviderAuth();
|
||||
readonly sessions: IProviderSessions = new ClaudeSessionsProvider();
|
||||
readonly sessionSynchronizer: IProviderSessionSynchronizer = new ClaudeSessionSynchronizer();
|
||||
|
||||
constructor() {
|
||||
super('claude');
|
||||
|
||||
@@ -0,0 +1,119 @@
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
|
||||
import { sessionsDb } from '@/modules/database/index.js';
|
||||
import {
|
||||
buildLookupMap,
|
||||
extractFirstValidJsonlData,
|
||||
findFilesRecursivelyCreatedAfter,
|
||||
normalizeSessionName,
|
||||
readFileTimestamps,
|
||||
} from '@/shared/utils.js';
|
||||
import type { IProviderSessionSynchronizer } from '@/shared/interfaces.js';
|
||||
|
||||
type ParsedSession = {
|
||||
sessionId: string;
|
||||
projectPath: string;
|
||||
sessionName?: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* Session indexer for Codex transcript artifacts.
|
||||
*/
|
||||
export class CodexSessionSynchronizer implements IProviderSessionSynchronizer {
|
||||
private readonly provider = 'codex' as const;
|
||||
private readonly codexHome = path.join(os.homedir(), '.codex');
|
||||
|
||||
/**
|
||||
* Scans ~/.codex/sessions and upserts discovered sessions into DB.
|
||||
*/
|
||||
async synchronize(since?: Date): Promise<number> {
|
||||
const nameMap = await buildLookupMap(path.join(this.codexHome, 'session_index.jsonl'), 'id', 'thread_name');
|
||||
const files = await findFilesRecursivelyCreatedAfter(
|
||||
path.join(this.codexHome, 'sessions'),
|
||||
'.jsonl',
|
||||
since ?? null
|
||||
);
|
||||
|
||||
let processed = 0;
|
||||
for (const filePath of files) {
|
||||
const parsed = await this.processSessionFile(filePath, nameMap);
|
||||
if (!parsed) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const existingSession = sessionsDb.getSessionById(parsed.sessionId);
|
||||
if (existingSession) {
|
||||
// If session name is untitled and we now have a name, update it
|
||||
if (existingSession.custom_name === 'Untitled Codex Session' && parsed.sessionName && parsed.sessionName !== 'Untitled Codex Session') {
|
||||
sessionsDb.updateSessionCustomName(parsed.sessionId, parsed.sessionName);
|
||||
}
|
||||
}
|
||||
|
||||
const timestamps = await readFileTimestamps(filePath);
|
||||
sessionsDb.createSession(
|
||||
parsed.sessionId,
|
||||
this.provider,
|
||||
parsed.projectPath,
|
||||
parsed.sessionName,
|
||||
timestamps.createdAt,
|
||||
timestamps.updatedAt,
|
||||
filePath
|
||||
);
|
||||
processed += 1;
|
||||
}
|
||||
|
||||
return processed;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses and upserts one Codex session JSONL file.
|
||||
*/
|
||||
async synchronizeFile(filePath: string): Promise<string | null> {
|
||||
if (!filePath.endsWith('.jsonl')) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const nameMap = await buildLookupMap(path.join(this.codexHome, 'session_index.jsonl'), 'id', 'thread_name');
|
||||
const parsed = await this.processSessionFile(filePath, nameMap);
|
||||
if (!parsed) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const timestamps = await readFileTimestamps(filePath);
|
||||
return sessionsDb.createSession(
|
||||
parsed.sessionId,
|
||||
this.provider,
|
||||
parsed.projectPath,
|
||||
parsed.sessionName,
|
||||
timestamps.createdAt,
|
||||
timestamps.updatedAt,
|
||||
filePath
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts session metadata from one Codex JSONL session file.
|
||||
*/
|
||||
private async processSessionFile(
|
||||
filePath: string,
|
||||
nameMap: Map<string, string>
|
||||
): Promise<ParsedSession | null> {
|
||||
return extractFirstValidJsonlData(filePath, (rawData) => {
|
||||
const data = rawData as Record<string, unknown>;
|
||||
const payload = data.payload as Record<string, unknown> | undefined;
|
||||
const sessionId = typeof payload?.id === 'string' ? payload.id : undefined;
|
||||
const projectPath = typeof payload?.cwd === 'string' ? payload.cwd : undefined;
|
||||
|
||||
if (!sessionId || !projectPath) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
sessionId,
|
||||
projectPath,
|
||||
sessionName: normalizeSessionName(nameMap.get(sessionId), 'Untitled Codex Session'),
|
||||
};
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,7 @@
|
||||
import { getCodexSessionMessages } from '@/projects.js';
|
||||
import fsSync from 'node:fs';
|
||||
import readline from 'node:readline';
|
||||
|
||||
import { sessionsDb } from '@/modules/database/index.js';
|
||||
import type { IProviderSessions } from '@/shared/interfaces.js';
|
||||
import type { AnyRecord, FetchHistoryOptions, FetchHistoryResult, NormalizedMessage } from '@/shared/types.js';
|
||||
import { createNormalizedMessage, generateMessageId, readObjectRecord } from '@/shared/utils.js';
|
||||
@@ -11,14 +14,250 @@ type CodexHistoryResult =
|
||||
messages?: AnyRecord[];
|
||||
total?: number;
|
||||
hasMore?: boolean;
|
||||
offset?: number;
|
||||
limit?: number | null;
|
||||
tokenUsage?: unknown;
|
||||
};
|
||||
|
||||
const loadCodexSessionMessages = getCodexSessionMessages as unknown as (
|
||||
function isVisibleCodexUserMessage(payload: AnyRecord | null | undefined): boolean {
|
||||
if (!payload || payload.type !== 'user_message') {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (payload.kind && payload.kind !== 'plain') {
|
||||
return false;
|
||||
}
|
||||
|
||||
return typeof payload.message === 'string' && payload.message.trim().length > 0;
|
||||
}
|
||||
|
||||
function extractCodexTextContent(content: unknown): string {
|
||||
if (!Array.isArray(content)) {
|
||||
return typeof content === 'string' ? content : '';
|
||||
}
|
||||
|
||||
return content
|
||||
.map((item) => {
|
||||
if (!item || typeof item !== 'object') {
|
||||
return '';
|
||||
}
|
||||
|
||||
const record = item as AnyRecord;
|
||||
if (
|
||||
(record.type === 'input_text' || record.type === 'output_text' || record.type === 'text')
|
||||
&& typeof record.text === 'string'
|
||||
) {
|
||||
return record.text;
|
||||
}
|
||||
|
||||
return '';
|
||||
})
|
||||
.filter(Boolean)
|
||||
.join('\n');
|
||||
}
|
||||
|
||||
async function getCodexSessionMessages(
|
||||
sessionId: string,
|
||||
limit: number | null,
|
||||
offset: number,
|
||||
) => Promise<CodexHistoryResult>;
|
||||
limit: number | null = null,
|
||||
offset = 0,
|
||||
): Promise<CodexHistoryResult> {
|
||||
try {
|
||||
const sessionFilePath = sessionsDb.getSessionById(sessionId)?.jsonl_path;
|
||||
|
||||
if (!sessionFilePath) {
|
||||
console.warn(`Codex session file not found for session ${sessionId}`);
|
||||
return { messages: [], total: 0, hasMore: false };
|
||||
}
|
||||
|
||||
const messages: AnyRecord[] = [];
|
||||
let tokenUsage: AnyRecord | null = null;
|
||||
const fileStream = fsSync.createReadStream(sessionFilePath);
|
||||
const rl = readline.createInterface({
|
||||
input: fileStream,
|
||||
crlfDelay: Infinity,
|
||||
});
|
||||
|
||||
for await (const line of rl) {
|
||||
if (!line.trim()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
const entry = JSON.parse(line) as AnyRecord;
|
||||
|
||||
if (entry.type === 'event_msg' && entry.payload?.type === 'token_count' && entry.payload?.info) {
|
||||
const info = entry.payload.info as AnyRecord;
|
||||
if (info.total_token_usage) {
|
||||
const usage = info.total_token_usage as AnyRecord;
|
||||
tokenUsage = {
|
||||
used: usage.total_tokens || 0,
|
||||
total: info.model_context_window || 200000,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (entry.type === 'event_msg' && isVisibleCodexUserMessage(entry.payload as AnyRecord)) {
|
||||
messages.push({
|
||||
type: 'user',
|
||||
timestamp: entry.timestamp,
|
||||
message: {
|
||||
role: 'user',
|
||||
content: entry.payload.message,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
if (
|
||||
entry.type === 'response_item' &&
|
||||
entry.payload?.type === 'message' &&
|
||||
entry.payload.role === 'assistant'
|
||||
) {
|
||||
const textContent = extractCodexTextContent(entry.payload.content);
|
||||
if (textContent.trim()) {
|
||||
messages.push({
|
||||
type: 'assistant',
|
||||
timestamp: entry.timestamp,
|
||||
message: {
|
||||
role: 'assistant',
|
||||
content: textContent,
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (entry.type === 'response_item' && entry.payload?.type === 'reasoning') {
|
||||
const summaryText = Array.isArray(entry.payload.summary)
|
||||
? entry.payload.summary
|
||||
.map((item: AnyRecord) => item?.text)
|
||||
.filter(Boolean)
|
||||
.join('\n')
|
||||
: '';
|
||||
|
||||
if (summaryText.trim()) {
|
||||
messages.push({
|
||||
type: 'thinking',
|
||||
timestamp: entry.timestamp,
|
||||
message: {
|
||||
role: 'assistant',
|
||||
content: summaryText,
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (entry.type === 'response_item' && entry.payload?.type === 'function_call') {
|
||||
let toolName = entry.payload.name;
|
||||
let toolInput = entry.payload.arguments;
|
||||
|
||||
if (toolName === 'shell_command') {
|
||||
toolName = 'Bash';
|
||||
try {
|
||||
const args = JSON.parse(entry.payload.arguments) as AnyRecord;
|
||||
toolInput = JSON.stringify({ command: args.command });
|
||||
} catch {
|
||||
// Keep original arguments when parsing fails.
|
||||
}
|
||||
}
|
||||
|
||||
messages.push({
|
||||
type: 'tool_use',
|
||||
timestamp: entry.timestamp,
|
||||
toolName,
|
||||
toolInput,
|
||||
toolCallId: entry.payload.call_id,
|
||||
});
|
||||
}
|
||||
|
||||
if (entry.type === 'response_item' && entry.payload?.type === 'function_call_output') {
|
||||
messages.push({
|
||||
type: 'tool_result',
|
||||
timestamp: entry.timestamp,
|
||||
toolCallId: entry.payload.call_id,
|
||||
output: entry.payload.output,
|
||||
});
|
||||
}
|
||||
|
||||
if (entry.type === 'response_item' && entry.payload?.type === 'custom_tool_call') {
|
||||
const toolName = entry.payload.name || 'custom_tool';
|
||||
const input = entry.payload.input || '';
|
||||
|
||||
if (toolName === 'apply_patch') {
|
||||
const fileMatch = String(input).match(/\*\*\* Update File: (.+)/);
|
||||
const filePath = fileMatch ? fileMatch[1].trim() : 'unknown';
|
||||
const lines = String(input).split('\n');
|
||||
const oldLines: string[] = [];
|
||||
const newLines: string[] = [];
|
||||
|
||||
for (const lineContent of lines) {
|
||||
if (lineContent.startsWith('-') && !lineContent.startsWith('---')) {
|
||||
oldLines.push(lineContent.slice(1));
|
||||
} else if (lineContent.startsWith('+') && !lineContent.startsWith('+++')) {
|
||||
newLines.push(lineContent.slice(1));
|
||||
}
|
||||
}
|
||||
|
||||
messages.push({
|
||||
type: 'tool_use',
|
||||
timestamp: entry.timestamp,
|
||||
toolName: 'Edit',
|
||||
toolInput: JSON.stringify({
|
||||
file_path: filePath,
|
||||
old_string: oldLines.join('\n'),
|
||||
new_string: newLines.join('\n'),
|
||||
}),
|
||||
toolCallId: entry.payload.call_id,
|
||||
});
|
||||
} else {
|
||||
messages.push({
|
||||
type: 'tool_use',
|
||||
timestamp: entry.timestamp,
|
||||
toolName,
|
||||
toolInput: input,
|
||||
toolCallId: entry.payload.call_id,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (entry.type === 'response_item' && entry.payload?.type === 'custom_tool_call_output') {
|
||||
messages.push({
|
||||
type: 'tool_result',
|
||||
timestamp: entry.timestamp,
|
||||
toolCallId: entry.payload.call_id,
|
||||
output: entry.payload.output || '',
|
||||
});
|
||||
}
|
||||
} catch {
|
||||
// Skip malformed lines.
|
||||
}
|
||||
}
|
||||
|
||||
messages.sort(
|
||||
(a, b) => new Date(a.timestamp || 0).getTime() - new Date(b.timestamp || 0).getTime(),
|
||||
);
|
||||
const total = messages.length;
|
||||
|
||||
if (limit !== null) {
|
||||
const startIndex = Math.max(0, total - offset - limit);
|
||||
const endIndex = total - offset;
|
||||
const paginatedMessages = messages.slice(startIndex, endIndex);
|
||||
const hasMore = startIndex > 0;
|
||||
|
||||
return {
|
||||
messages: paginatedMessages,
|
||||
total,
|
||||
hasMore,
|
||||
offset,
|
||||
limit,
|
||||
tokenUsage,
|
||||
};
|
||||
}
|
||||
|
||||
return { messages, tokenUsage };
|
||||
} catch (error) {
|
||||
console.error(`Error reading Codex session messages for ${sessionId}:`, error);
|
||||
return { messages: [], total: 0, hasMore: false };
|
||||
}
|
||||
}
|
||||
|
||||
export class CodexSessionsProvider implements IProviderSessions {
|
||||
/**
|
||||
@@ -31,6 +270,23 @@ export class CodexSessionsProvider implements IProviderSessions {
|
||||
const ts = raw.timestamp || new Date().toISOString();
|
||||
const baseId = raw.uuid || generateMessageId('codex');
|
||||
|
||||
if (raw.type === 'thinking' || raw.isReasoning) {
|
||||
const thinkingContent = typeof raw.message?.content === 'string'
|
||||
? raw.message.content
|
||||
: '';
|
||||
if (!thinkingContent.trim()) {
|
||||
return [];
|
||||
}
|
||||
return [createNormalizedMessage({
|
||||
id: baseId,
|
||||
sessionId,
|
||||
timestamp: ts,
|
||||
provider: PROVIDER,
|
||||
kind: 'thinking',
|
||||
content: thinkingContent,
|
||||
})];
|
||||
}
|
||||
|
||||
if (raw.message?.role === 'user') {
|
||||
const content = typeof raw.message.content === 'string'
|
||||
? raw.message.content
|
||||
@@ -77,17 +333,6 @@ export class CodexSessionsProvider implements IProviderSessions {
|
||||
})];
|
||||
}
|
||||
|
||||
if (raw.type === 'thinking' || raw.isReasoning) {
|
||||
return [createNormalizedMessage({
|
||||
id: baseId,
|
||||
sessionId,
|
||||
timestamp: ts,
|
||||
provider: PROVIDER,
|
||||
kind: 'thinking',
|
||||
content: raw.message?.content || '',
|
||||
})];
|
||||
}
|
||||
|
||||
if (raw.type === 'tool_use' || raw.toolName) {
|
||||
return [createNormalizedMessage({
|
||||
id: baseId,
|
||||
@@ -275,7 +520,7 @@ export class CodexSessionsProvider implements IProviderSessions {
|
||||
|
||||
let result: CodexHistoryResult;
|
||||
try {
|
||||
result = await loadCodexSessionMessages(sessionId, limit, offset);
|
||||
result = await getCodexSessionMessages(sessionId, limit, offset);
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
console.warn(`[CodexProvider] Failed to load session ${sessionId}:`, message);
|
||||
|
||||
@@ -1,13 +1,15 @@
|
||||
import { AbstractProvider } from '@/modules/providers/shared/base/abstract.provider.js';
|
||||
import { CodexProviderAuth } from '@/modules/providers/list/codex/codex-auth.provider.js';
|
||||
import { CodexMcpProvider } from '@/modules/providers/list/codex/codex-mcp.provider.js';
|
||||
import { CodexSessionSynchronizer } from '@/modules/providers/list/codex/codex-session-synchronizer.provider.js';
|
||||
import { CodexSessionsProvider } from '@/modules/providers/list/codex/codex-sessions.provider.js';
|
||||
import type { IProviderAuth, IProviderSessions } from '@/shared/interfaces.js';
|
||||
import type { IProviderAuth, IProviderSessionSynchronizer, IProviderSessions } from '@/shared/interfaces.js';
|
||||
|
||||
export class CodexProvider extends AbstractProvider {
|
||||
readonly mcp = new CodexMcpProvider();
|
||||
readonly auth: IProviderAuth = new CodexProviderAuth();
|
||||
readonly sessions: IProviderSessions = new CodexSessionsProvider();
|
||||
readonly sessionSynchronizer: IProviderSessionSynchronizer = new CodexSessionSynchronizer();
|
||||
|
||||
constructor() {
|
||||
super('codex');
|
||||
|
||||
@@ -0,0 +1,176 @@
|
||||
import crypto from 'node:crypto';
|
||||
import fs from 'node:fs';
|
||||
import fsp from 'node:fs/promises';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
import readline from 'node:readline';
|
||||
|
||||
import { sessionsDb } from '@/modules/database/index.js';
|
||||
import {
|
||||
extractFirstValidJsonlData,
|
||||
findFilesRecursivelyCreatedAfter,
|
||||
normalizeSessionName,
|
||||
readFileTimestamps,
|
||||
} from '@/shared/utils.js';
|
||||
import type { IProviderSessionSynchronizer } from '@/shared/interfaces.js';
|
||||
|
||||
/** Minimal session metadata extracted from one Cursor transcript file. */
type ParsedSession = {
  sessionId: string;    // Chat session id (the JSONL file's basename).
  projectPath: string;  // Workspace path parsed from the adjacent worker.log.
  sessionName?: string; // Display name; a default is substituted when absent.
};
|
||||
|
||||
/**
|
||||
* Returns directory entries or an empty list when the folder is missing.
|
||||
*/
|
||||
async function listDirectoryEntriesSafe(
|
||||
directoryPath: string
|
||||
): Promise<import('node:fs').Dirent[]> {
|
||||
try {
|
||||
return await fsp.readdir(directoryPath, { withFileTypes: true });
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Session indexer for Cursor transcript artifacts.
 *
 * Discovers projects under ~/.cursor/projects, resolves each project's
 * workspace path from its worker.log, then indexes the chat JSONL files
 * Cursor stores under ~/.cursor/chats/<md5(projectPath)>.
 */
export class CursorSessionSynchronizer implements IProviderSessionSynchronizer {
  private readonly provider = 'cursor' as const;
  // Root of Cursor's per-user state (projects, chats).
  private readonly cursorHome = path.join(os.homedir(), '.cursor');

  /**
   * Scans Cursor chats and upserts discovered sessions into DB.
   *
   * @param since - When set, only chat files created after this time are indexed.
   * @returns Number of session files upserted.
   */
  async synchronize(since?: Date): Promise<number> {
    const projectsDir = path.join(this.cursorHome, 'projects');
    const projectEntries = await listDirectoryEntriesSafe(projectsDir);
    // Multiple project folders can resolve to one workspace; index it once.
    const seenProjectPaths = new Set<string>();

    let processed = 0;
    for (const entry of projectEntries) {
      if (!entry.isDirectory()) {
        continue;
      }

      const workerLogPath = path.join(projectsDir, entry.name, 'worker.log');
      const projectPath = await this.extractProjectPathFromWorkerLog(workerLogPath);
      if (!projectPath || seenProjectPaths.has(projectPath)) {
        continue;
      }

      seenProjectPaths.add(projectPath);
      // Cursor names the chat folder after the md5 hash of the workspace path.
      const projectHash = this.md5(projectPath);
      const chatsDir = path.join(this.cursorHome, 'chats', projectHash);
      const files = await findFilesRecursivelyCreatedAfter(chatsDir, '.jsonl', since ?? null);

      for (const filePath of files) {
        const parsed = await this.processSessionFile(filePath);
        if (!parsed) {
          continue;
        }

        const timestamps = await readFileTimestamps(filePath);
        sessionsDb.createSession(
          parsed.sessionId,
          this.provider,
          parsed.projectPath,
          parsed.sessionName,
          timestamps.createdAt,
          timestamps.updatedAt,
          filePath
        );
        processed += 1;
      }
    }

    return processed;
  }

  /**
   * Parses and upserts one Cursor session JSONL file.
   *
   * @returns The upserted session id, or null for non-JSONL or unparseable files.
   */
  async synchronizeFile(filePath: string): Promise<string | null> {
    if (!filePath.endsWith('.jsonl')) {
      return null;
    }

    const parsed = await this.processSessionFile(filePath);
    if (!parsed) {
      return null;
    }

    const timestamps = await readFileTimestamps(filePath);
    return sessionsDb.createSession(
      parsed.sessionId,
      this.provider,
      parsed.projectPath,
      parsed.sessionName,
      timestamps.createdAt,
      timestamps.updatedAt,
      filePath
    );
  }

  /**
   * Produces the same project hash Cursor uses in chat directory names.
   */
  private md5(input: string): string {
    return crypto.createHash('md5').update(input).digest('hex');
  }

  /**
   * Extracts the workspace path from a Cursor worker.log.
   *
   * Streams the log and returns the first non-empty `workspacePath=...` value.
   */
  private async extractProjectPathFromWorkerLog(filePath: string): Promise<string | null> {
    try {
      const fileStream = fs.createReadStream(filePath, { encoding: 'utf8' });
      const lineReader = readline.createInterface({ input: fileStream, crlfDelay: Infinity });

      for await (const line of lineReader) {
        const match = line.match(/workspacePath=(.*)$/);
        const projectPath = match?.[1]?.trim();
        if (projectPath) {
          // Stop streaming as soon as the path is known.
          lineReader.close();
          fileStream.close();
          return projectPath;
        }
      }
    } catch {
      // Missing worker logs are valid for partial or incomplete session data.
    }

    return null;
  }

  /**
   * Extracts session metadata from one Cursor JSONL session file.
   *
   * Session id is the file basename; the project path is read from the
   * worker.log two directories above the chat file.
   * NOTE(review): for files found under ~/.cursor/chats/<hash>/…, verify a
   * worker.log actually exists at that grandparent location — the scan in
   * synchronize() reads worker.log from ~/.cursor/projects/<name>/ instead.
   */
  private async processSessionFile(filePath: string): Promise<ParsedSession | null> {
    const sessionId = path.basename(filePath, '.jsonl');
    const grandparentDir = path.dirname(path.dirname(filePath));
    const workerLogPath = path.join(grandparentDir, 'worker.log');
    const projectPath = await this.extractProjectPathFromWorkerLog(workerLogPath);

    if (!projectPath) {
      return null;
    }

    return extractFirstValidJsonlData(filePath, (rawData) => {
      const data = rawData as Record<string, any>;
      if (data.role !== 'user') {
        return null;
      }

      // First line of the first user message (minus <user_query> markers)
      // becomes the session name.
      const text = typeof data.message?.content?.[0]?.text === 'string' ? data.message.content[0].text : '';
      const firstLine = text.replace(/<\/?user_query>/g, '').trim().split('\n')[0];

      return {
        sessionId,
        projectPath,
        sessionName: normalizeSessionName(firstLine, 'Untitled Cursor Session'),
      };
    });
  }
}
|
||||
@@ -1,13 +1,15 @@
|
||||
import { AbstractProvider } from '@/modules/providers/shared/base/abstract.provider.js';
|
||||
import { CursorProviderAuth } from '@/modules/providers/list/cursor/cursor-auth.provider.js';
|
||||
import { CursorMcpProvider } from '@/modules/providers/list/cursor/cursor-mcp.provider.js';
|
||||
import { CursorSessionSynchronizer } from '@/modules/providers/list/cursor/cursor-session-synchronizer.provider.js';
|
||||
import { CursorSessionsProvider } from '@/modules/providers/list/cursor/cursor-sessions.provider.js';
|
||||
import type { IProviderAuth, IProviderSessions } from '@/shared/interfaces.js';
|
||||
import type { IProviderAuth, IProviderSessionSynchronizer, IProviderSessions } from '@/shared/interfaces.js';
|
||||
|
||||
export class CursorProvider extends AbstractProvider {
|
||||
readonly mcp = new CursorMcpProvider();
|
||||
readonly auth: IProviderAuth = new CursorProviderAuth();
|
||||
readonly sessions: IProviderSessions = new CursorSessionsProvider();
|
||||
readonly sessionSynchronizer: IProviderSessionSynchronizer = new CursorSessionSynchronizer();
|
||||
|
||||
constructor() {
|
||||
super('cursor');
|
||||
|
||||
@@ -0,0 +1,401 @@
|
||||
import crypto from 'node:crypto';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
import { readFile } from 'node:fs/promises';
|
||||
|
||||
import { projectsDb, sessionsDb } from '@/modules/database/index.js';
|
||||
import {
|
||||
findFilesRecursivelyCreatedAfter,
|
||||
normalizeProjectPath,
|
||||
normalizeSessionName,
|
||||
readFileTimestamps,
|
||||
} from '@/shared/utils.js';
|
||||
import type { IProviderSessionSynchronizer } from '@/shared/interfaces.js';
|
||||
import type { AnyRecord } from '@/shared/types.js';
|
||||
|
||||
/** Minimal session metadata extracted from one Gemini artifact. */
type ParsedSession = {
  sessionId: string;
  projectPath: string;
  sessionName?: string; // Display name; a default is substituted when absent.
};

/** Fields harvested from the first useful lines of a Gemini JSONL file. */
type GeminiJsonlMetadata = {
  sessionId: string;
  projectPath?: string;      // Explicit path, when the artifact records one.
  projectHash?: string;      // Hash-only artifacts are resolved via a lookup map.
  firstUserMessage?: string; // Used as the default session name.
};
|
||||
|
||||
/**
 * Session indexer for Gemini transcript artifacts.
 *
 * Indexes both legacy single-JSON session files and newer JSONL transcripts
 * found under ~/.gemini/sessions and ~/.gemini/tmp (chat workspaces only).
 */
export class GeminiSessionSynchronizer implements IProviderSessionSynchronizer {
  private readonly provider = 'gemini' as const;
  // Root of Gemini CLI per-user state.
  private readonly geminiHome = path.join(os.homedir(), '.gemini');

  /**
   * Scans Gemini legacy JSON and new JSONL artifacts and upserts sessions into DB.
   *
   * @param since - When set, only files created after this time are scanned.
   * @returns Number of artifacts upserted.
   */
  async synchronize(since?: Date): Promise<number> {
    const projectHashLookup = this.buildProjectHashLookup();

    const legacySessionFiles = await findFilesRecursivelyCreatedAfter(
      path.join(this.geminiHome, 'sessions'),
      '.json',
      since ?? null
    );
    const legacyTempFiles = await findFilesRecursivelyCreatedAfter(
      path.join(this.geminiHome, 'tmp'),
      '.json',
      since ?? null
    );
    const jsonlSessionFiles = await findFilesRecursivelyCreatedAfter(
      path.join(this.geminiHome, 'sessions'),
      '.jsonl',
      since ?? null
    );
    const jsonlTempFiles = await findFilesRecursivelyCreatedAfter(
      path.join(this.geminiHome, 'tmp'),
      '.jsonl',
      since ?? null
    );

    // Process legacy JSON first, then JSONL. If both exist for a session id,
    // the JSONL artifact becomes the canonical jsonl_path via upsert.
    const files = [
      ...legacySessionFiles,
      ...legacyTempFiles,
      ...jsonlSessionFiles,
      ...jsonlTempFiles,
    ];

    let processed = 0;
    for (const filePath of files) {
      if (this.shouldSkipTempArtifact(filePath)) {
        continue;
      }

      const parsed = filePath.endsWith('.jsonl')
        ? await this.processJsonlSessionFile(filePath, projectHashLookup)
        : await this.processLegacySessionFile(filePath);
      if (!parsed) {
        continue;
      }

      const timestamps = await readFileTimestamps(filePath);
      sessionsDb.createSession(
        parsed.sessionId,
        this.provider,
        parsed.projectPath,
        parsed.sessionName,
        timestamps.createdAt,
        timestamps.updatedAt,
        filePath
      );
      processed += 1;
    }

    return processed;
  }

  /**
   * Parses and upserts one Gemini legacy JSON or JSONL artifact.
   *
   * @returns The upserted session id, or null when the file is skipped.
   */
  async synchronizeFile(filePath: string): Promise<string | null> {
    if (!filePath.endsWith('.json') && !filePath.endsWith('.jsonl')) {
      return null;
    }

    if (this.shouldSkipTempArtifact(filePath)) {
      return null;
    }

    const parsed = filePath.endsWith('.jsonl')
      ? await this.processJsonlSessionFile(filePath, this.buildProjectHashLookup())
      : await this.processLegacySessionFile(filePath);
    if (!parsed) {
      return null;
    }

    const timestamps = await readFileTimestamps(filePath);
    return sessionsDb.createSession(
      parsed.sessionId,
      this.provider,
      parsed.projectPath,
      parsed.sessionName,
      timestamps.createdAt,
      timestamps.updatedAt,
      filePath
    );
  }

  /**
   * Extracts session metadata from one Gemini legacy JSON artifact.
   * Returns null for unreadable files or when no project path resolves.
   */
  private async processLegacySessionFile(filePath: string): Promise<ParsedSession | null> {
    try {
      const content = await readFile(filePath, 'utf8');
      const data = JSON.parse(content) as AnyRecord;

      // Newer artifacts use `sessionId`; older ones use `id`.
      const sessionId =
        typeof data.sessionId === 'string'
          ? data.sessionId
          : typeof data.id === 'string'
            ? data.id
            : undefined;
      if (!sessionId) {
        return null;
      }

      // Prefer the path embedded in the artifact; fall back to the
      // `.project_root` marker of the surrounding chat workspace.
      const workspaceProjectPath = await this.resolveProjectPathFromChatWorkspace(filePath);
      const projectPath = typeof data.projectPath === 'string' && data.projectPath.trim().length > 0
        ? data.projectPath
        : workspaceProjectPath;
      if (!projectPath) {
        return null;
      }

      // The first message's text doubles as the default session name.
      const messages = Array.isArray(data.messages) ? data.messages : [];
      const firstMessage = messages[0] as AnyRecord | undefined;
      let rawName: string | undefined;

      if (Array.isArray(firstMessage?.content) && typeof firstMessage.content[0]?.text === 'string') {
        rawName = firstMessage.content[0].text;
      } else if (typeof firstMessage?.content === 'string') {
        rawName = firstMessage.content;
      }

      return {
        sessionId,
        projectPath,
        sessionName: normalizeSessionName(rawName, 'New Gemini Chat'),
      };
    } catch {
      return null;
    }
  }

  /**
   * Extracts session metadata from one Gemini JSONL artifact.
   * Project resolution order: explicit projectPath, then workspace marker,
   * then the projectHash lookup map.
   */
  private async processJsonlSessionFile(
    filePath: string,
    projectHashLookup: Map<string, string>
  ): Promise<ParsedSession | null> {
    const metadata = await this.extractJsonlMetadata(filePath);
    if (!metadata) {
      return null;
    }

    let projectPath = typeof metadata.projectPath === 'string' ? metadata.projectPath.trim() : '';
    if (!projectPath) {
      const workspaceProjectPath = await this.resolveProjectPathFromChatWorkspace(filePath);
      if (workspaceProjectPath) {
        projectPath = workspaceProjectPath;
      }
    }
    if (!projectPath && typeof metadata.projectHash === 'string') {
      projectPath = projectHashLookup.get(metadata.projectHash.trim().toLowerCase()) ?? '';
    }
    if (!projectPath) {
      return null;
    }

    // Once we resolve a project hash/path pair, keep it in-memory for this sync run.
    if (typeof metadata.projectHash === 'string' && metadata.projectHash.trim()) {
      projectHashLookup.set(metadata.projectHash.trim().toLowerCase(), projectPath);
    }

    return {
      sessionId: metadata.sessionId,
      projectPath,
      sessionName: normalizeSessionName(metadata.firstUserMessage, 'New Gemini Chat'),
    };
  }

  /**
   * Reads first useful metadata from Gemini JSONL files.
   * Stops early once id, project info, and a first user message are all known.
   */
  private async extractJsonlMetadata(filePath: string): Promise<GeminiJsonlMetadata | null> {
    try {
      const content = await readFile(filePath, 'utf8');
      const lines = content.split('\n');

      let sessionId: string | undefined;
      let projectPath: string | undefined;
      let projectHash: string | undefined;
      let firstUserMessage: string | undefined;

      for (const line of lines) {
        const trimmed = line.trim();
        if (!trimmed) {
          continue;
        }

        let parsed: AnyRecord;
        try {
          parsed = JSON.parse(trimmed) as AnyRecord;
        } catch {
          // Malformed lines are skipped, not fatal.
          continue;
        }

        if (!sessionId && typeof parsed.sessionId === 'string') {
          sessionId = parsed.sessionId;
        }
        if (!projectPath && typeof parsed.projectPath === 'string') {
          projectPath = parsed.projectPath;
        }
        if (!projectHash && typeof parsed.projectHash === 'string') {
          projectHash = parsed.projectHash;
        }

        if (!firstUserMessage && parsed.type === 'user') {
          firstUserMessage = this.extractGeminiTextContent(parsed.content);
        }

        if (sessionId && (projectPath || projectHash) && firstUserMessage) {
          break;
        }
      }

      if (!sessionId) {
        return null;
      }

      return {
        sessionId,
        projectPath,
        projectHash,
        firstUserMessage,
      };
    } catch {
      return null;
    }
  }

  /**
   * Tries to resolve project root from Gemini tmp chat workspaces.
   * Reads the `.project_root` marker next to the `chats` directory; returns ''
   * when the file is not inside a chats workspace or the marker is absent.
   */
  private async resolveProjectPathFromChatWorkspace(filePath: string): Promise<string> {
    if (!filePath.includes(`${path.sep}chats${path.sep}`)) {
      return '';
    }

    const chatsDir = path.dirname(filePath);
    const workspaceDir = path.dirname(chatsDir);
    const projectRootPath = path.join(workspaceDir, '.project_root');

    try {
      const rootContent = await readFile(projectRootPath, 'utf8');
      return rootContent.trim();
    } catch {
      return '';
    }
  }

  /**
   * Builds a hash->path lookup for Gemini JSONL metadata that stores projectHash.
   * Seeds candidates from every project and gemini-session path already in DB.
   */
  private buildProjectHashLookup(): Map<string, string> {
    const lookup = new Map<string, string>();
    const knownPaths = new Set<string>();

    for (const project of projectsDb.getProjectPaths()) {
      if (typeof project.project_path === 'string' && project.project_path.trim()) {
        knownPaths.add(project.project_path.trim());
      }
    }

    for (const session of sessionsDb.getAllSessions()) {
      if (session.provider === this.provider && typeof session.project_path === 'string' && session.project_path.trim()) {
        knownPaths.add(session.project_path.trim());
      }
    }

    for (const knownPath of knownPaths) {
      this.addProjectHashCandidates(lookup, knownPath);
    }

    return lookup;
  }

  /**
   * Adds likely Gemini hash variants for one project path.
   * Hashes the raw, normalized, resolved, and (on Windows) lowercased forms;
   * the first hash wins on collision.
   */
  private addProjectHashCandidates(lookup: Map<string, string>, projectPath: string): void {
    const trimmed = projectPath.trim();
    if (!trimmed) {
      return;
    }

    const normalized = normalizeProjectPath(trimmed);
    const resolved = path.resolve(trimmed);
    const resolvedNormalized = normalizeProjectPath(resolved);

    const candidates = new Set<string>([
      trimmed,
      normalized,
      resolved,
      resolvedNormalized,
    ]);

    if (process.platform === 'win32') {
      // Windows paths compare case-insensitively; hash lowercase variants too.
      for (const candidate of [...candidates]) {
        candidates.add(candidate.toLowerCase());
      }
    }

    for (const candidate of candidates) {
      if (!candidate) {
        continue;
      }

      const hash = this.sha256(candidate);
      if (!lookup.has(hash)) {
        lookup.set(hash, trimmed);
      }
    }
  }

  /**
   * Returns first user text from Gemini content payload shapes.
   */
  private extractGeminiTextContent(content: unknown): string | undefined {
    if (typeof content === 'string' && content.trim().length > 0) {
      return content;
    }

    if (!Array.isArray(content)) {
      return undefined;
    }

    for (const part of content) {
      if (typeof part === 'string' && part.trim().length > 0) {
        return part;
      }

      if (part && typeof part === 'object' && typeof (part as AnyRecord).text === 'string') {
        const text = (part as AnyRecord).text;
        if (text.trim().length > 0) {
          return text;
        }
      }
    }

    return undefined;
  }

  /**
   * Keeps tmp scanning scoped to chat artifacts only.
   */
  private shouldSkipTempArtifact(filePath: string): boolean {
    return (
      filePath.startsWith(path.join(this.geminiHome, 'tmp'))
      && !filePath.includes(`${path.sep}chats${path.sep}`)
    );
  }

  // Hex SHA-256 used to match Gemini's stored projectHash values.
  private sha256(value: string): string {
    return crypto.createHash('sha256').update(value).digest('hex');
  }
}
|
||||
@@ -1,11 +1,249 @@
|
||||
import sessionManager from '@/sessionManager.js';
|
||||
import { getGeminiCliSessionMessages } from '@/projects.js';
|
||||
import fsSync from 'node:fs';
|
||||
import fs from 'node:fs/promises';
|
||||
import readline from 'node:readline';
|
||||
|
||||
import { sessionsDb } from '@/modules/database/index.js';
|
||||
import type { IProviderSessions } from '@/shared/interfaces.js';
|
||||
import type { AnyRecord, FetchHistoryOptions, FetchHistoryResult, NormalizedMessage } from '@/shared/types.js';
|
||||
import { createNormalizedMessage, generateMessageId, readObjectRecord } from '@/shared/utils.js';
|
||||
|
||||
const PROVIDER = 'gemini';
|
||||
|
||||
/** Shape returned by the Gemini session history loader. */
type GeminiHistoryResult = {
  messages: AnyRecord[]; // Normalized chat entries in transcript order.
  tokenUsage?: unknown;  // Optional usage snapshot when the transcript has one.
};
|
||||
|
||||
function mapGeminiRole(value: unknown): 'user' | 'assistant' | null {
|
||||
if (value === 'user') {
|
||||
return 'user';
|
||||
}
|
||||
|
||||
if (value === 'gemini' || value === 'assistant') {
|
||||
return 'assistant';
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
function extractGeminiTextContent(content: unknown): string {
|
||||
if (typeof content === 'string') {
|
||||
return content;
|
||||
}
|
||||
|
||||
if (!Array.isArray(content)) {
|
||||
return '';
|
||||
}
|
||||
|
||||
return content
|
||||
.map((part) => {
|
||||
if (typeof part === 'string') {
|
||||
return part;
|
||||
}
|
||||
if (!part || typeof part !== 'object') {
|
||||
return '';
|
||||
}
|
||||
|
||||
const record = part as AnyRecord;
|
||||
if (typeof record.text === 'string') {
|
||||
return record.text;
|
||||
}
|
||||
|
||||
return '';
|
||||
})
|
||||
.filter(Boolean)
|
||||
.join('\n');
|
||||
}
|
||||
|
||||
function extractGeminiThoughts(thoughts: unknown): string {
|
||||
if (!Array.isArray(thoughts)) {
|
||||
return '';
|
||||
}
|
||||
|
||||
return thoughts
|
||||
.map((item) => {
|
||||
if (!item || typeof item !== 'object') {
|
||||
return '';
|
||||
}
|
||||
|
||||
const record = item as AnyRecord;
|
||||
const subject = typeof record.subject === 'string' ? record.subject.trim() : '';
|
||||
const description = typeof record.description === 'string' ? record.description.trim() : '';
|
||||
|
||||
if (subject && description) {
|
||||
return `${subject}: ${description}`;
|
||||
}
|
||||
|
||||
return description || subject;
|
||||
})
|
||||
.filter(Boolean)
|
||||
.join('\n');
|
||||
}
|
||||
|
||||
function buildGeminiTokenUsage(tokens: unknown): AnyRecord | undefined {
|
||||
if (!tokens || typeof tokens !== 'object') {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const record = tokens as AnyRecord;
|
||||
const input = Number(record.input || 0);
|
||||
const output = Number(record.output || 0);
|
||||
const cached = Number(record.cached || 0);
|
||||
const thoughts = Number(record.thoughts || 0);
|
||||
const tool = Number(record.tool || 0);
|
||||
|
||||
const totalFromFields = input + output + cached + thoughts + tool;
|
||||
const total = Number(record.total || totalFromFields || 0);
|
||||
|
||||
return {
|
||||
used: total,
|
||||
total: total,
|
||||
breakdown: {
|
||||
input,
|
||||
output,
|
||||
cached,
|
||||
thoughts,
|
||||
tool,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
async function getGeminiLegacySessionMessages(sessionFilePath: string): Promise<GeminiHistoryResult> {
|
||||
try {
|
||||
const data = await fs.readFile(sessionFilePath, 'utf8');
|
||||
const session = JSON.parse(data) as AnyRecord;
|
||||
const sourceMessages = Array.isArray(session.messages) ? session.messages : [];
|
||||
|
||||
const messages: AnyRecord[] = [];
|
||||
for (const msg of sourceMessages) {
|
||||
const role = mapGeminiRole(msg.type ?? msg.role);
|
||||
if (!role) {
|
||||
continue;
|
||||
}
|
||||
|
||||
messages.push({
|
||||
type: 'message',
|
||||
uuid: typeof msg.id === 'string' ? msg.id : undefined,
|
||||
message: { role, content: msg.content },
|
||||
timestamp: msg.timestamp || null,
|
||||
});
|
||||
}
|
||||
|
||||
return { messages };
|
||||
} catch {
|
||||
return { messages: [] };
|
||||
}
|
||||
}
|
||||
|
||||
async function getGeminiJsonlSessionMessages(sessionFilePath: string): Promise<GeminiHistoryResult> {
|
||||
const messages: AnyRecord[] = [];
|
||||
let tokenUsage: AnyRecord | undefined;
|
||||
|
||||
try {
|
||||
const fileStream = fsSync.createReadStream(sessionFilePath);
|
||||
const lineReader = readline.createInterface({
|
||||
input: fileStream,
|
||||
crlfDelay: Infinity,
|
||||
});
|
||||
|
||||
for await (const line of lineReader) {
|
||||
const trimmed = line.trim();
|
||||
if (!trimmed) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let entry: AnyRecord;
|
||||
try {
|
||||
entry = JSON.parse(trimmed) as AnyRecord;
|
||||
} catch {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Metadata/update lines (e.g. {$set:{lastUpdated:...}}) do not represent chat messages.
|
||||
if (entry.$set) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const role = mapGeminiRole(entry.type);
|
||||
if (role) {
|
||||
const textContent = extractGeminiTextContent(entry.content);
|
||||
if (textContent.trim()) {
|
||||
messages.push({
|
||||
type: 'message',
|
||||
uuid: typeof entry.id === 'string' ? entry.id : undefined,
|
||||
message: { role, content: textContent },
|
||||
timestamp: entry.timestamp || null,
|
||||
});
|
||||
}
|
||||
|
||||
const thinkingContent = extractGeminiThoughts(entry.thoughts);
|
||||
if (thinkingContent.trim()) {
|
||||
messages.push({
|
||||
type: 'thinking',
|
||||
uuid: typeof entry.id === 'string' ? `${entry.id}_thinking` : undefined,
|
||||
message: { role: 'assistant', content: thinkingContent },
|
||||
timestamp: entry.timestamp || null,
|
||||
isReasoning: true,
|
||||
});
|
||||
}
|
||||
|
||||
if (role === 'assistant') {
|
||||
const usage = buildGeminiTokenUsage(entry.tokens);
|
||||
if (usage) {
|
||||
tokenUsage = usage;
|
||||
}
|
||||
}
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
if (entry.type === 'tool_use') {
|
||||
messages.push({
|
||||
type: 'tool_use',
|
||||
uuid: typeof entry.id === 'string' ? entry.id : undefined,
|
||||
timestamp: entry.timestamp || null,
|
||||
toolName: entry.tool_name || entry.name || 'Tool',
|
||||
toolInput: entry.parameters ?? entry.input ?? entry.arguments ?? '',
|
||||
toolCallId: entry.tool_id || entry.toolCallId || entry.id,
|
||||
});
|
||||
continue;
|
||||
}
|
||||
|
||||
if (entry.type === 'tool_result') {
|
||||
messages.push({
|
||||
type: 'tool_result',
|
||||
uuid: typeof entry.id === 'string' ? entry.id : undefined,
|
||||
timestamp: entry.timestamp || null,
|
||||
toolCallId: entry.tool_id || entry.toolCallId || entry.id || '',
|
||||
output: entry.output ?? entry.result ?? '',
|
||||
isError: Boolean(entry.error) || entry.status === 'error',
|
||||
});
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
return { messages: [] };
|
||||
}
|
||||
|
||||
messages.sort(
|
||||
(a, b) => new Date(a.timestamp || 0).getTime() - new Date(b.timestamp || 0).getTime(),
|
||||
);
|
||||
|
||||
return { messages, tokenUsage };
|
||||
}
|
||||
|
||||
async function getGeminiCliSessionMessages(sessionId: string): Promise<GeminiHistoryResult> {
|
||||
const sessionFilePath = sessionsDb.getSessionById(sessionId)?.jsonl_path;
|
||||
if (!sessionFilePath) {
|
||||
return { messages: [] };
|
||||
}
|
||||
|
||||
if (sessionFilePath.endsWith('.jsonl')) {
|
||||
return getGeminiJsonlSessionMessages(sessionFilePath);
|
||||
}
|
||||
|
||||
return getGeminiLegacySessionMessages(sessionFilePath);
|
||||
}
|
||||
|
||||
export class GeminiSessionsProvider implements IProviderSessions {
|
||||
/**
|
||||
* Normalizes live Gemini stream-json events into the shared message shape.
|
||||
@@ -108,8 +346,7 @@ export class GeminiSessionsProvider implements IProviderSessions {
|
||||
}
|
||||
|
||||
/**
|
||||
* Loads Gemini history from the in-memory session manager first, then falls
|
||||
* back to Gemini CLI session files on disk.
|
||||
* Loads Gemini history from Gemini CLI session files on disk.
|
||||
*/
|
||||
async fetchHistory(
|
||||
sessionId: string,
|
||||
@@ -117,28 +354,73 @@ export class GeminiSessionsProvider implements IProviderSessions {
|
||||
): Promise<FetchHistoryResult> {
|
||||
const { limit = null, offset = 0 } = options;
|
||||
|
||||
let rawMessages: AnyRecord[];
|
||||
let result: GeminiHistoryResult;
|
||||
try {
|
||||
rawMessages = sessionManager.getSessionMessages(sessionId) as AnyRecord[];
|
||||
|
||||
if (rawMessages.length === 0) {
|
||||
rawMessages = await getGeminiCliSessionMessages(sessionId) as AnyRecord[];
|
||||
}
|
||||
result = await getGeminiCliSessionMessages(sessionId);
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
console.warn(`[GeminiProvider] Failed to load session ${sessionId}:`, message);
|
||||
return { messages: [], total: 0, hasMore: false, offset: 0, limit: null };
|
||||
}
|
||||
|
||||
const rawMessages = result.messages;
|
||||
const normalized: NormalizedMessage[] = [];
|
||||
|
||||
for (let i = 0; i < rawMessages.length; i++) {
|
||||
const raw = rawMessages[i];
|
||||
const ts = raw.timestamp || new Date().toISOString();
|
||||
const baseId = raw.uuid || generateMessageId('gemini');
|
||||
|
||||
if (raw.type === 'thinking' || raw.isReasoning) {
|
||||
const thinkingContent = typeof raw.message?.content === 'string'
|
||||
? raw.message.content
|
||||
: typeof raw.content === 'string'
|
||||
? raw.content
|
||||
: '';
|
||||
|
||||
if (thinkingContent.trim()) {
|
||||
normalized.push(createNormalizedMessage({
|
||||
id: baseId,
|
||||
sessionId,
|
||||
timestamp: ts,
|
||||
provider: PROVIDER,
|
||||
kind: 'thinking',
|
||||
content: thinkingContent,
|
||||
}));
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
if (raw.type === 'tool_use' || raw.toolName) {
|
||||
normalized.push(createNormalizedMessage({
|
||||
id: baseId,
|
||||
sessionId,
|
||||
timestamp: ts,
|
||||
provider: PROVIDER,
|
||||
kind: 'tool_use',
|
||||
toolName: raw.toolName || 'Tool',
|
||||
toolInput: raw.toolInput,
|
||||
toolId: raw.toolCallId || baseId,
|
||||
}));
|
||||
continue;
|
||||
}
|
||||
|
||||
if (raw.type === 'tool_result') {
|
||||
normalized.push(createNormalizedMessage({
|
||||
id: baseId,
|
||||
sessionId,
|
||||
timestamp: ts,
|
||||
provider: PROVIDER,
|
||||
kind: 'tool_result',
|
||||
toolId: raw.toolCallId || '',
|
||||
content: raw.output === undefined ? '' : String(raw.output),
|
||||
isError: Boolean(raw.isError),
|
||||
}));
|
||||
continue;
|
||||
}
|
||||
|
||||
const role = raw.message?.role || raw.role;
|
||||
const content = raw.message?.content || raw.content;
|
||||
|
||||
if (!role || !content) {
|
||||
continue;
|
||||
}
|
||||
@@ -147,8 +429,26 @@ export class GeminiSessionsProvider implements IProviderSessions {
|
||||
|
||||
if (Array.isArray(content)) {
|
||||
for (let partIdx = 0; partIdx < content.length; partIdx++) {
|
||||
const part = content[partIdx];
|
||||
if (part.type === 'text' && part.text) {
|
||||
const part = content[partIdx] as AnyRecord | string;
|
||||
|
||||
if (typeof part === 'string' && part.trim()) {
|
||||
normalized.push(createNormalizedMessage({
|
||||
id: `${baseId}_${partIdx}`,
|
||||
sessionId,
|
||||
timestamp: ts,
|
||||
provider: PROVIDER,
|
||||
kind: 'text',
|
||||
role: normalizedRole,
|
||||
content: part,
|
||||
}));
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!part || typeof part !== 'object') {
|
||||
continue;
|
||||
}
|
||||
|
||||
if ((part.type === 'text' || !part.type) && typeof part.text === 'string' && part.text.trim()) {
|
||||
normalized.push(createNormalizedMessage({
|
||||
id: `${baseId}_${partIdx}`,
|
||||
sessionId,
|
||||
@@ -192,6 +492,19 @@ export class GeminiSessionsProvider implements IProviderSessions {
|
||||
role: normalizedRole,
|
||||
content,
|
||||
}));
|
||||
} else {
|
||||
const textContent = extractGeminiTextContent(content);
|
||||
if (textContent.trim()) {
|
||||
normalized.push(createNormalizedMessage({
|
||||
id: baseId,
|
||||
sessionId,
|
||||
timestamp: ts,
|
||||
provider: PROVIDER,
|
||||
kind: 'text',
|
||||
role: normalizedRole,
|
||||
content: textContent,
|
||||
}));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -222,6 +535,7 @@ export class GeminiSessionsProvider implements IProviderSessions {
|
||||
hasMore: pageLimit === null ? false : start + pageLimit < normalized.length,
|
||||
offset: start,
|
||||
limit: pageLimit,
|
||||
tokenUsage: result.tokenUsage,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,13 +1,15 @@
|
||||
import { AbstractProvider } from '@/modules/providers/shared/base/abstract.provider.js';
|
||||
import { GeminiProviderAuth } from '@/modules/providers/list/gemini/gemini-auth.provider.js';
|
||||
import { GeminiMcpProvider } from '@/modules/providers/list/gemini/gemini-mcp.provider.js';
|
||||
import { GeminiSessionSynchronizer } from '@/modules/providers/list/gemini/gemini-session-synchronizer.provider.js';
|
||||
import { GeminiSessionsProvider } from '@/modules/providers/list/gemini/gemini-sessions.provider.js';
|
||||
import type { IProviderAuth, IProviderSessions } from '@/shared/interfaces.js';
|
||||
import type { IProviderAuth, IProviderSessionSynchronizer, IProviderSessions } from '@/shared/interfaces.js';
|
||||
|
||||
export class GeminiProvider extends AbstractProvider {
|
||||
readonly mcp = new GeminiMcpProvider();
|
||||
readonly auth: IProviderAuth = new GeminiProviderAuth();
|
||||
readonly sessions: IProviderSessions = new GeminiSessionsProvider();
|
||||
readonly sessionSynchronizer: IProviderSessionSynchronizer = new GeminiSessionSynchronizer();
|
||||
|
||||
constructor() {
|
||||
super('gemini');
|
||||
|
||||
@@ -2,6 +2,8 @@ import express, { type Request, type Response } from 'express';
|
||||
|
||||
import { providerAuthService } from '@/modules/providers/services/provider-auth.service.js';
|
||||
import { providerMcpService } from '@/modules/providers/services/mcp.service.js';
|
||||
import { sessionConversationsSearchService } from '@/modules/providers/services/session-conversations-search.service.js';
|
||||
import { sessionsService } from '@/modules/providers/services/sessions.service.js';
|
||||
import type { LLMProvider, McpScope, McpTransport, UpsertProviderMcpServerInput } from '@/shared/types.js';
|
||||
import { AppError, asyncHandler, createApiSuccessResponse } from '@/shared/utils.js';
|
||||
|
||||
@@ -25,6 +27,20 @@ const readPathParam = (value: unknown, name: string): string => {
|
||||
const normalizeProviderParam = (value: unknown): string =>
|
||||
readPathParam(value, 'provider').trim().toLowerCase();
|
||||
|
||||
const SESSION_ID_PATTERN = /^[a-zA-Z0-9._-]{1,120}$/;
|
||||
|
||||
const parseSessionId = (value: unknown): string => {
|
||||
const sessionId = readPathParam(value, 'sessionId').trim();
|
||||
if (!SESSION_ID_PATTERN.test(sessionId)) {
|
||||
throw new AppError('Invalid sessionId.', {
|
||||
code: 'INVALID_SESSION_ID',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
return sessionId;
|
||||
};
|
||||
|
||||
const readOptionalQueryString = (value: unknown): string | undefined => {
|
||||
if (typeof value !== 'string') {
|
||||
return undefined;
|
||||
@@ -34,6 +50,29 @@ const readOptionalQueryString = (value: unknown): string | undefined => {
|
||||
return normalized.length > 0 ? normalized : undefined;
|
||||
};
|
||||
|
||||
const parseOptionalBooleanQuery = (value: unknown, name: string): boolean | undefined => {
|
||||
if (value === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const normalized = readOptionalQueryString(value);
|
||||
if (!normalized) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
if (normalized === 'true') {
|
||||
return true;
|
||||
}
|
||||
if (normalized === 'false') {
|
||||
return false;
|
||||
}
|
||||
|
||||
throw new AppError(`${name} must be "true" or "false".`, {
|
||||
code: 'INVALID_QUERY_PARAMETER',
|
||||
statusCode: 400,
|
||||
});
|
||||
};
|
||||
|
||||
const parseMcpScope = (value: unknown): McpScope | undefined => {
|
||||
if (value === undefined) {
|
||||
return undefined;
|
||||
@@ -103,19 +142,19 @@ const parseMcpUpsertPayload = (payload: unknown): UpsertProviderMcpServerInput =
|
||||
args: Array.isArray(body.args) ? body.args.filter((entry): entry is string => typeof entry === 'string') : undefined,
|
||||
env: typeof body.env === 'object' && body.env !== null
|
||||
? Object.fromEntries(
|
||||
Object.entries(body.env as Record<string, unknown>).filter(
|
||||
(entry): entry is [string, string] => typeof entry[1] === 'string',
|
||||
),
|
||||
)
|
||||
Object.entries(body.env as Record<string, unknown>).filter(
|
||||
(entry): entry is [string, string] => typeof entry[1] === 'string',
|
||||
),
|
||||
)
|
||||
: undefined,
|
||||
cwd: readOptionalQueryString(body.cwd),
|
||||
url: readOptionalQueryString(body.url),
|
||||
headers: typeof body.headers === 'object' && body.headers !== null
|
||||
? Object.fromEntries(
|
||||
Object.entries(body.headers as Record<string, unknown>).filter(
|
||||
(entry): entry is [string, string] => typeof entry[1] === 'string',
|
||||
),
|
||||
)
|
||||
Object.entries(body.headers as Record<string, unknown>).filter(
|
||||
(entry): entry is [string, string] => typeof entry[1] === 'string',
|
||||
),
|
||||
)
|
||||
: undefined,
|
||||
envVars: Array.isArray(body.envVars)
|
||||
? body.envVars.filter((entry): entry is string => typeof entry === 'string')
|
||||
@@ -123,10 +162,10 @@ const parseMcpUpsertPayload = (payload: unknown): UpsertProviderMcpServerInput =
|
||||
bearerTokenEnvVar: readOptionalQueryString(body.bearerTokenEnvVar),
|
||||
envHttpHeaders: typeof body.envHttpHeaders === 'object' && body.envHttpHeaders !== null
|
||||
? Object.fromEntries(
|
||||
Object.entries(body.envHttpHeaders as Record<string, unknown>).filter(
|
||||
(entry): entry is [string, string] => typeof entry[1] === 'string',
|
||||
),
|
||||
)
|
||||
Object.entries(body.envHttpHeaders as Record<string, unknown>).filter(
|
||||
(entry): entry is [string, string] => typeof entry[1] === 'string',
|
||||
),
|
||||
)
|
||||
: undefined,
|
||||
};
|
||||
};
|
||||
@@ -143,6 +182,62 @@ const parseProvider = (value: unknown): LLMProvider => {
|
||||
});
|
||||
};
|
||||
|
||||
const parseSessionRenameSummary = (payload: unknown): string => {
|
||||
if (!payload || typeof payload !== 'object') {
|
||||
throw new AppError('Request body must be an object.', {
|
||||
code: 'INVALID_REQUEST_BODY',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const body = payload as Record<string, unknown>;
|
||||
const summary = typeof body.summary === 'string' ? body.summary.trim() : '';
|
||||
if (!summary) {
|
||||
throw new AppError('Summary is required.', {
|
||||
code: 'INVALID_SESSION_SUMMARY',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
if (summary.length > 500) {
|
||||
throw new AppError('Summary must not exceed 500 characters.', {
|
||||
code: 'INVALID_SESSION_SUMMARY',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
return summary;
|
||||
};
|
||||
|
||||
const parseSessionSearchQuery = (value: unknown): string => {
|
||||
const query = readOptionalQueryString(value) ?? '';
|
||||
if (query.length < 2) {
|
||||
throw new AppError('Query must be at least 2 characters', {
|
||||
code: 'INVALID_SEARCH_QUERY',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
return query;
|
||||
};
|
||||
|
||||
const parseSessionSearchLimit = (value: unknown): number => {
|
||||
const raw = readOptionalQueryString(value);
|
||||
if (!raw) {
|
||||
return 50;
|
||||
}
|
||||
|
||||
const parsed = Number.parseInt(raw, 10);
|
||||
if (Number.isNaN(parsed)) {
|
||||
throw new AppError('limit must be a valid integer.', {
|
||||
code: 'INVALID_QUERY_PARAMETER',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
return Math.max(1, Math.min(parsed, 100));
|
||||
};
|
||||
|
||||
router.get(
|
||||
'/:provider/auth/status',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
@@ -152,6 +247,7 @@ router.get(
|
||||
}),
|
||||
);
|
||||
|
||||
// ----------------- MCP routes -----------------
|
||||
router.get(
|
||||
'/:provider/mcp/servers',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
@@ -214,4 +310,116 @@ router.post(
|
||||
}),
|
||||
);
|
||||
|
||||
// ----------------- Session routes -----------------
|
||||
router.delete(
|
||||
'/sessions/:sessionId',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const sessionId = parseSessionId(req.params.sessionId);
|
||||
const deletedFromDisk = parseOptionalBooleanQuery(req.query.deletedFromDisk, 'deletedFromDisk') ?? false;
|
||||
const result = await sessionsService.deleteSessionById(sessionId, deletedFromDisk);
|
||||
res.json(createApiSuccessResponse(result));
|
||||
}),
|
||||
);
|
||||
|
||||
router.put(
|
||||
'/sessions/:sessionId',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const sessionId = parseSessionId(req.params.sessionId);
|
||||
const summary = parseSessionRenameSummary(req.body);
|
||||
const result = sessionsService.renameSessionById(sessionId, summary);
|
||||
res.json(createApiSuccessResponse(result));
|
||||
}),
|
||||
);
|
||||
|
||||
router.get(
|
||||
'/sessions/:sessionId/messages',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const sessionId = parseSessionId(req.params.sessionId);
|
||||
const limitRaw = readOptionalQueryString(req.query.limit);
|
||||
const offsetRaw = readOptionalQueryString(req.query.offset);
|
||||
|
||||
let limit: number | null = null;
|
||||
if (limitRaw !== undefined) {
|
||||
const parsedLimit = Number.parseInt(limitRaw, 10);
|
||||
if (Number.isNaN(parsedLimit) || parsedLimit < 0) {
|
||||
throw new AppError('limit must be a non-negative integer.', {
|
||||
code: 'INVALID_QUERY_PARAMETER',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
limit = parsedLimit;
|
||||
}
|
||||
|
||||
let offset = 0;
|
||||
if (offsetRaw !== undefined) {
|
||||
const parsedOffset = Number.parseInt(offsetRaw, 10);
|
||||
if (Number.isNaN(parsedOffset) || parsedOffset < 0) {
|
||||
throw new AppError('offset must be a non-negative integer.', {
|
||||
code: 'INVALID_QUERY_PARAMETER',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
offset = parsedOffset;
|
||||
}
|
||||
|
||||
const result = await sessionsService.fetchHistory(sessionId, {
|
||||
limit,
|
||||
offset,
|
||||
});
|
||||
res.json(result);
|
||||
}),
|
||||
);
|
||||
|
||||
router.get('/search/sessions', asyncHandler(async (req: Request, res: Response) => {
|
||||
const query = parseSessionSearchQuery(req.query.q);
|
||||
const limit = parseSessionSearchLimit(req.query.limit);
|
||||
|
||||
res.writeHead(200, {
|
||||
'Content-Type': 'text/event-stream',
|
||||
'Cache-Control': 'no-cache',
|
||||
Connection: 'keep-alive',
|
||||
'X-Accel-Buffering': 'no',
|
||||
});
|
||||
|
||||
let closed = false;
|
||||
const abortController = new AbortController();
|
||||
req.on('close', () => {
|
||||
closed = true;
|
||||
abortController.abort();
|
||||
});
|
||||
|
||||
try {
|
||||
await sessionConversationsSearchService.search({
|
||||
query,
|
||||
limit,
|
||||
signal: abortController.signal,
|
||||
onProgress: ({ projectResult, totalMatches, scannedProjects, totalProjects }) => {
|
||||
if (closed) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (projectResult) {
|
||||
res.write(`event: result\ndata: ${JSON.stringify({ projectResult, totalMatches, scannedProjects, totalProjects })}\n\n`);
|
||||
return;
|
||||
}
|
||||
|
||||
res.write(`event: progress\ndata: ${JSON.stringify({ totalMatches, scannedProjects, totalProjects })}\n\n`);
|
||||
},
|
||||
});
|
||||
|
||||
if (!closed) {
|
||||
res.write('event: done\ndata: {}\n\n');
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error searching conversations:', error);
|
||||
if (!closed) {
|
||||
res.write(`event: error\ndata: ${JSON.stringify({ error: 'Search failed' })}\n\n`);
|
||||
}
|
||||
} finally {
|
||||
if (!closed) {
|
||||
res.end();
|
||||
}
|
||||
}
|
||||
}));
|
||||
|
||||
export default router;
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,74 @@
|
||||
import { scanStateDb } from '@/modules/database/index.js';
|
||||
import { providerRegistry } from '@/modules/providers/provider.registry.js';
|
||||
import type { LLMProvider } from '@/shared/types.js';
|
||||
|
||||
type SessionSynchronizeResult = {
|
||||
processedByProvider: Record<LLMProvider, number>;
|
||||
failures: string[];
|
||||
};
|
||||
|
||||
/**
|
||||
* Orchestrates provider-specific session indexers and indexed-session lifecycle operations.
|
||||
*/
|
||||
export const sessionSynchronizerService = {
|
||||
/**
|
||||
* Runs all provider synchronizers and updates scan_state.last_scanned_at.
|
||||
*/
|
||||
async synchronizeSessions(): Promise<SessionSynchronizeResult> {
|
||||
const lastScanAt = scanStateDb.getLastScannedAt();
|
||||
const scanBoundary = new Date();
|
||||
const processedByProvider: Record<LLMProvider, number> = {
|
||||
claude: 0,
|
||||
codex: 0,
|
||||
cursor: 0,
|
||||
gemini: 0,
|
||||
};
|
||||
const failures: string[] = [];
|
||||
|
||||
const results = await Promise.allSettled(
|
||||
providerRegistry.listProviders().map(async (provider) => ({
|
||||
provider: provider.id,
|
||||
processed: await provider.sessionSynchronizer.synchronize(lastScanAt ?? undefined),
|
||||
}))
|
||||
);
|
||||
|
||||
for (const result of results) {
|
||||
if (result.status === 'fulfilled') {
|
||||
processedByProvider[result.value.provider] = result.value.processed;
|
||||
continue;
|
||||
}
|
||||
|
||||
const reason = result.reason instanceof Error ? result.reason.message : String(result.reason);
|
||||
failures.push(reason);
|
||||
}
|
||||
|
||||
if (failures.length === 0) {
|
||||
scanStateDb.updateLastScannedAt(scanBoundary);
|
||||
} else {
|
||||
console.warn(
|
||||
`[Sessions] Skipping scan_state cursor advance because ${failures.length} provider sync(s) failed.`,
|
||||
);
|
||||
}
|
||||
|
||||
return {
|
||||
processedByProvider,
|
||||
failures,
|
||||
};
|
||||
},
|
||||
|
||||
/**
|
||||
* Indexes one provider artifact file without running a full provider rescan.
|
||||
*/
|
||||
async synchronizeProviderFile(
|
||||
provider: LLMProvider,
|
||||
filePath: string
|
||||
): Promise<{ provider: LLMProvider; indexed: boolean; sessionId: string | null }> {
|
||||
const resolvedProvider = providerRegistry.resolveProvider(provider);
|
||||
const sessionId = await resolvedProvider.sessionSynchronizer.synchronizeFile(filePath);
|
||||
return {
|
||||
provider,
|
||||
indexed: Boolean(sessionId),
|
||||
sessionId,
|
||||
};
|
||||
},
|
||||
};
|
||||
283
server/modules/providers/services/sessions-watcher.service.ts
Normal file
283
server/modules/providers/services/sessions-watcher.service.ts
Normal file
@@ -0,0 +1,283 @@
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
import { promises as fsPromises } from 'node:fs';
|
||||
|
||||
import chokidar, { type FSWatcher } from 'chokidar';
|
||||
|
||||
import { sessionSynchronizerService } from '@/modules/providers/services/session-synchronizer.service.js';
|
||||
import { WS_OPEN_STATE, connectedClients } from '@/modules/websocket/index.js';
|
||||
import type { LLMProvider } from '@/shared/types.js';
|
||||
import { getProjectsWithSessions } from '@/modules/projects/index.js';
|
||||
|
||||
type WatcherEventType = 'add' | 'change';
|
||||
|
||||
const PROVIDER_WATCH_PATHS: Array<{ provider: LLMProvider; rootPath: string }> = [
|
||||
{
|
||||
provider: 'claude',
|
||||
rootPath: path.join(os.homedir(), '.claude', 'projects'),
|
||||
},
|
||||
{
|
||||
provider: 'cursor',
|
||||
rootPath: path.join(os.homedir(), '.cursor', 'chats'),
|
||||
},
|
||||
{
|
||||
provider: 'codex',
|
||||
rootPath: path.join(os.homedir(), '.codex', 'sessions'),
|
||||
},
|
||||
{
|
||||
provider: 'gemini',
|
||||
rootPath: path.join(os.homedir(), '.gemini', 'sessions'),
|
||||
},
|
||||
{
|
||||
provider: 'gemini',
|
||||
rootPath: path.join(os.homedir(), '.gemini', 'tmp'),
|
||||
},
|
||||
];
|
||||
|
||||
const WATCHER_IGNORED_PATTERNS = [
|
||||
'**/node_modules/**',
|
||||
'**/.git/**',
|
||||
'**/dist/**',
|
||||
'**/build/**',
|
||||
'**/*.tmp',
|
||||
'**/*.swp',
|
||||
'**/.DS_Store',
|
||||
];
|
||||
|
||||
const PROJECTS_UPDATE_DEBOUNCE_MS = 500;
|
||||
const PROJECTS_UPDATE_MAX_WAIT_MS = 2_000;
|
||||
|
||||
const watchers: FSWatcher[] = [];
|
||||
|
||||
type PendingWatcherUpdate = {
|
||||
providers: Set<LLMProvider>;
|
||||
changeTypes: Set<WatcherEventType>;
|
||||
updatedSessionIds: Set<string>;
|
||||
};
|
||||
|
||||
let pendingWatcherUpdate: PendingWatcherUpdate | null = null;
|
||||
let pendingWatcherUpdateStartedAt: number | null = null;
|
||||
let pendingWatcherFlushTimer: ReturnType<typeof setTimeout> | null = null;
|
||||
let watcherRefreshInFlight = false;
|
||||
let watcherRescheduleAfterRefresh = false;
|
||||
|
||||
/**
|
||||
* Filters watcher events to provider-specific session artifact file types.
|
||||
*/
|
||||
function isWatcherTargetFile(provider: LLMProvider, filePath: string): boolean {
|
||||
if (provider === 'gemini') {
|
||||
return filePath.endsWith('.json') || filePath.endsWith('.jsonl');
|
||||
}
|
||||
|
||||
return filePath.endsWith('.jsonl');
|
||||
}
|
||||
|
||||
function clearPendingWatcherFlushTimer(): void {
|
||||
if (pendingWatcherFlushTimer) {
|
||||
clearTimeout(pendingWatcherFlushTimer);
|
||||
pendingWatcherFlushTimer = null;
|
||||
}
|
||||
}
|
||||
|
||||
function schedulePendingWatcherFlush(): void {
|
||||
if (!pendingWatcherUpdate) {
|
||||
return;
|
||||
}
|
||||
|
||||
const now = Date.now();
|
||||
if (pendingWatcherUpdateStartedAt === null) {
|
||||
pendingWatcherUpdateStartedAt = now;
|
||||
}
|
||||
|
||||
const elapsed = now - pendingWatcherUpdateStartedAt;
|
||||
const remainingMaxWait = Math.max(0, PROJECTS_UPDATE_MAX_WAIT_MS - elapsed);
|
||||
const delay = Math.min(PROJECTS_UPDATE_DEBOUNCE_MS, remainingMaxWait);
|
||||
|
||||
clearPendingWatcherFlushTimer();
|
||||
pendingWatcherFlushTimer = setTimeout(() => {
|
||||
void flushPendingWatcherUpdate();
|
||||
}, delay);
|
||||
}
|
||||
|
||||
function queuePendingWatcherUpdate(
|
||||
eventType: WatcherEventType,
|
||||
provider: LLMProvider,
|
||||
updatedSessionId: string | null
|
||||
): void {
|
||||
if (!pendingWatcherUpdate) {
|
||||
pendingWatcherUpdate = {
|
||||
providers: new Set<LLMProvider>(),
|
||||
changeTypes: new Set<WatcherEventType>(),
|
||||
updatedSessionIds: new Set<string>(),
|
||||
};
|
||||
}
|
||||
|
||||
pendingWatcherUpdate.providers.add(provider);
|
||||
pendingWatcherUpdate.changeTypes.add(eventType);
|
||||
if (updatedSessionId) {
|
||||
pendingWatcherUpdate.updatedSessionIds.add(updatedSessionId);
|
||||
}
|
||||
|
||||
schedulePendingWatcherFlush();
|
||||
}
|
||||
|
||||
async function flushPendingWatcherUpdate(): Promise<void> {
|
||||
clearPendingWatcherFlushTimer();
|
||||
|
||||
if (!pendingWatcherUpdate) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (watcherRefreshInFlight) {
|
||||
watcherRescheduleAfterRefresh = true;
|
||||
return;
|
||||
}
|
||||
|
||||
const queuedUpdate = pendingWatcherUpdate;
|
||||
pendingWatcherUpdate = null;
|
||||
pendingWatcherUpdateStartedAt = null;
|
||||
watcherRefreshInFlight = true;
|
||||
|
||||
try {
|
||||
const updatedProjects = await getProjectsWithSessions({ skipSynchronization: true });
|
||||
const changeTypes = Array.from(queuedUpdate.changeTypes);
|
||||
const watchProviders = Array.from(queuedUpdate.providers);
|
||||
const updatedSessionIds = Array.from(queuedUpdate.updatedSessionIds);
|
||||
|
||||
// Backward-compatible fields stay populated with the first queued values.
|
||||
const updateMessage = JSON.stringify({
|
||||
type: 'projects_updated',
|
||||
projects: updatedProjects,
|
||||
timestamp: new Date().toISOString(),
|
||||
changeType: changeTypes[0] ?? 'change',
|
||||
updatedSessionId: updatedSessionIds[0] ?? undefined,
|
||||
watchProvider: watchProviders[0] ?? undefined,
|
||||
changeTypes,
|
||||
updatedSessionIds,
|
||||
watchProviders,
|
||||
batched: true,
|
||||
});
|
||||
|
||||
connectedClients.forEach(client => {
|
||||
if (client.readyState === WS_OPEN_STATE) {
|
||||
client.send(updateMessage);
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
console.error('Session watcher refresh failed while broadcasting projects_updated', { error: message });
|
||||
} finally {
|
||||
watcherRefreshInFlight = false;
|
||||
|
||||
if (pendingWatcherUpdate || watcherRescheduleAfterRefresh) {
|
||||
watcherRescheduleAfterRefresh = false;
|
||||
schedulePendingWatcherFlush();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Handles file watcher updates and triggers provider file-level synchronization.
|
||||
*/
|
||||
async function onUpdate(
|
||||
eventType: WatcherEventType,
|
||||
filePath: string,
|
||||
provider: LLMProvider
|
||||
): Promise<void> {
|
||||
if (!isWatcherTargetFile(provider, filePath)) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await sessionSynchronizerService.synchronizeProviderFile(provider, filePath);
|
||||
if (!result.indexed) {
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`Session synchronization triggered by ${eventType} event for provider "${provider}"`, {
|
||||
filePath,
|
||||
sessionId: result.sessionId,
|
||||
});
|
||||
queuePendingWatcherUpdate(eventType, provider, result.sessionId);
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
console.error(`Session watcher sync failed for provider "${provider}"`, {
|
||||
eventType,
|
||||
filePath,
|
||||
error: message,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Starts provider filesystem watchers and performs initial DB synchronization.
|
||||
*/
|
||||
export async function initializeSessionsWatcher(): Promise<void> {
|
||||
console.log('Setting up session watchers');
|
||||
|
||||
const initialSync = await sessionSynchronizerService.synchronizeSessions();
|
||||
console.log('Initial session synchronization complete', {
|
||||
processedByProvider: initialSync.processedByProvider,
|
||||
failures: initialSync.failures,
|
||||
});
|
||||
|
||||
for (const { provider, rootPath } of PROVIDER_WATCH_PATHS) {
|
||||
try {
|
||||
await fsPromises.mkdir(rootPath, { recursive: true });
|
||||
|
||||
const watcher = chokidar.watch(rootPath, {
|
||||
ignored: WATCHER_IGNORED_PATTERNS,
|
||||
persistent: true,
|
||||
ignoreInitial: true,
|
||||
followSymlinks: false,
|
||||
depth: 6,
|
||||
usePolling: true,
|
||||
interval: 6_000,
|
||||
binaryInterval: 6_000,
|
||||
});
|
||||
|
||||
watcher
|
||||
.on('add', (filePath: string) => {
|
||||
void onUpdate('add', filePath, provider);
|
||||
})
|
||||
.on('change', (filePath: string) => {
|
||||
void onUpdate('change', filePath, provider);
|
||||
})
|
||||
.on('error', (error: unknown) => {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
console.error(`Session watcher error for provider "${provider}"`, { error: message });
|
||||
});
|
||||
|
||||
watchers.push(watcher);
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
console.error(`Failed to initialize session watcher for provider "${provider}"`, {
|
||||
rootPath,
|
||||
error: message,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stops all active provider session watchers.
|
||||
*/
|
||||
export async function closeSessionsWatcher(): Promise<void> {
|
||||
clearPendingWatcherFlushTimer();
|
||||
|
||||
await Promise.all(
|
||||
watchers.map(async (watcher) => {
|
||||
try {
|
||||
await watcher.close();
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
console.error('Failed to close session watcher', { error: message });
|
||||
}
|
||||
})
|
||||
);
|
||||
watchers.length = 0;
|
||||
pendingWatcherUpdate = null;
|
||||
pendingWatcherUpdateStartedAt = null;
|
||||
watcherRefreshInFlight = false;
|
||||
watcherRescheduleAfterRefresh = false;
|
||||
}
|
||||
@@ -1,3 +1,6 @@
|
||||
import fsp from 'node:fs/promises';
|
||||
|
||||
import { sessionsDb } from '@/modules/database/index.js';
|
||||
import { providerRegistry } from '@/modules/providers/provider.registry.js';
|
||||
import type {
|
||||
FetchHistoryOptions,
|
||||
@@ -5,6 +8,23 @@ import type {
|
||||
LLMProvider,
|
||||
NormalizedMessage,
|
||||
} from '@/shared/types.js';
|
||||
import { AppError } from '@/shared/utils.js';
|
||||
|
||||
/**
|
||||
* Removes one file if it exists.
|
||||
*/
|
||||
async function removeFileIfExists(filePath: string): Promise<boolean> {
|
||||
try {
|
||||
await fsp.unlink(filePath);
|
||||
return true;
|
||||
} catch (error) {
|
||||
const code = (error as NodeJS.ErrnoException).code;
|
||||
if (code === 'ENOENT') {
|
||||
return false;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Application service for provider-backed session message operations.
|
||||
@@ -33,13 +53,78 @@ export const sessionsService = {
|
||||
},
|
||||
|
||||
/**
|
||||
* Fetches normalized persisted session history for one provider/session pair.
|
||||
* Fetches persisted history by session id.
|
||||
*
|
||||
* Provider and provider-specific lookup hints are resolved from the indexed
|
||||
* session metadata in the database.
|
||||
*/
|
||||
fetchHistory(
|
||||
providerName: string,
|
||||
sessionId: string,
|
||||
options?: FetchHistoryOptions,
|
||||
options: Pick<FetchHistoryOptions, 'limit' | 'offset'> = {},
|
||||
): Promise<FetchHistoryResult> {
|
||||
return providerRegistry.resolveProvider(providerName).sessions.fetchHistory(sessionId, options);
|
||||
const session = sessionsDb.getSessionById(sessionId);
|
||||
if (!session) {
|
||||
throw new AppError(`Session "${sessionId}" was not found.`, {
|
||||
code: 'SESSION_NOT_FOUND',
|
||||
statusCode: 404,
|
||||
});
|
||||
}
|
||||
|
||||
const provider = session.provider as LLMProvider;
|
||||
return providerRegistry.resolveProvider(provider).sessions.fetchHistory(sessionId, {
|
||||
limit: options.limit ?? null,
|
||||
offset: options.offset ?? 0,
|
||||
projectPath: session.project_path ?? '',
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
* Deletes one persisted session row by id.
|
||||
*
|
||||
* When `deletedFromDisk` is true and a session `jsonl_path` exists, the path
|
||||
* is deleted from disk before the DB row is removed.
|
||||
*/
|
||||
async deleteSessionById(
|
||||
sessionId: string,
|
||||
deletedFromDisk = false,
|
||||
): Promise<{ sessionId: string; deletedFromDisk: boolean }> {
|
||||
const session = sessionsDb.getSessionById(sessionId);
|
||||
if (!session) {
|
||||
throw new AppError(`Session "${sessionId}" was not found.`, {
|
||||
code: 'SESSION_NOT_FOUND',
|
||||
statusCode: 404,
|
||||
});
|
||||
}
|
||||
|
||||
let removedFromDisk = false;
|
||||
if (deletedFromDisk && session.jsonl_path) {
|
||||
removedFromDisk = await removeFileIfExists(session.jsonl_path);
|
||||
}
|
||||
|
||||
const deleted = sessionsDb.deleteSessionById(sessionId);
|
||||
if (!deleted) {
|
||||
throw new AppError(`Session "${sessionId}" was not found.`, {
|
||||
code: 'SESSION_NOT_FOUND',
|
||||
statusCode: 404,
|
||||
});
|
||||
}
|
||||
|
||||
return { sessionId, deletedFromDisk: removedFromDisk };
|
||||
},
|
||||
|
||||
/**
 * Renames one session by id without requiring the caller to pass provider.
 *
 * @throws AppError (404, SESSION_NOT_FOUND) when no row matches `sessionId`.
 */
renameSessionById(sessionId: string, summary: string): { sessionId: string; summary: string } {
  const existing = sessionsDb.getSessionById(sessionId);
  if (!existing) {
    throw new AppError(`Session "${sessionId}" was not found.`, {
      code: 'SESSION_NOT_FOUND',
      statusCode: 404,
    });
  }

  sessionsDb.updateSessionCustomName(sessionId, summary);
  return { sessionId, summary };
},
|
||||
};
|
||||
|
||||
@@ -1,4 +1,10 @@
|
||||
import type { IProvider, IProviderAuth, IProviderMcp, IProviderSessions } from '@/shared/interfaces.js';
|
||||
import type {
|
||||
IProvider,
|
||||
IProviderAuth,
|
||||
IProviderMcp,
|
||||
IProviderSessionSynchronizer,
|
||||
IProviderSessions,
|
||||
} from '@/shared/interfaces.js';
|
||||
import type { LLMProvider } from '@/shared/types.js';
|
||||
|
||||
/**
|
||||
@@ -13,6 +19,7 @@ export abstract class AbstractProvider implements IProvider {
|
||||
abstract readonly mcp: IProviderMcp;
|
||||
abstract readonly auth: IProviderAuth;
|
||||
abstract readonly sessions: IProviderSessions;
|
||||
abstract readonly sessionSynchronizer: IProviderSessionSynchronizer;
|
||||
|
||||
protected constructor(id: LLMProvider) {
|
||||
this.id = id;
|
||||
|
||||
267
server/modules/websocket/README.md
Normal file
267
server/modules/websocket/README.md
Normal file
@@ -0,0 +1,267 @@
|
||||
# WebSocket Module
|
||||
|
||||
This module owns the server-side WebSocket gateway used by:
|
||||
|
||||
1. Chat streaming (`/ws`)
|
||||
2. Interactive terminal sessions (`/shell`)
|
||||
3. Plugin WebSocket passthrough (`/plugin-ws/:pluginName`)
|
||||
|
||||
It is intentionally structured as **small services** plus a **barrel export** in `index.ts`.
|
||||
|
||||
## Public API
|
||||
|
||||
`server/modules/websocket/index.ts` exports:
|
||||
|
||||
1. `createWebSocketServer(server, dependencies)`
|
||||
Creates and wires the shared `ws` server.
|
||||
2. `connectedClients` and `WS_OPEN_STATE`
|
||||
Shared chat client registry and open-state constant used by other modules.
|
||||
|
||||
## Why Dependency Injection Is Used
|
||||
|
||||
The module receives runtime-specific functions from `server/index.js` instead of importing legacy runtime files directly.
|
||||
|
||||
Benefits:
|
||||
|
||||
1. Keeps module boundaries clean (`server/modules/*` architecture rule).
|
||||
2. Makes each service easier to test in isolation.
|
||||
3. Keeps WebSocket transport concerns separate from provider runtime concerns.
|
||||
|
||||
## Service Map
|
||||
|
||||
| File | Responsibility |
|
||||
|---|---|
|
||||
| `services/websocket-server.service.ts` | Creates `WebSocketServer`, binds `verifyClient`, routes connection by pathname |
|
||||
| `services/websocket-auth.service.ts` | Authenticates upgrade requests and attaches `request.user` |
|
||||
| `services/chat-websocket.service.ts` | Handles `/ws` chat protocol and provider command/session control messages |
|
||||
| `services/shell-websocket.service.ts` | Handles `/shell` PTY lifecycle, reconnect buffering, auth URL detection |
|
||||
| `services/plugin-websocket-proxy.service.ts` | Bridges client socket to plugin socket |
|
||||
| `services/websocket-writer.service.ts` | Adapts raw WebSocket to writer interface (`send`, `setSessionId`, `getSessionId`) |
|
||||
| `services/websocket-state.service.ts` | Holds shared chat client set and open-state constant |
|
||||
|
||||
## High-Level Architecture
|
||||
|
||||
```mermaid
|
||||
flowchart LR
|
||||
A[HTTP Server] --> B[createWebSocketServer]
|
||||
B --> C[verifyWebSocketClient]
|
||||
B --> D{Pathname}
|
||||
D -->|/ws| E[handleChatConnection]
|
||||
D -->|/shell| F[handleShellConnection]
|
||||
D -->|/plugin-ws/:name| G[handlePluginWsProxy]
|
||||
D -->|other| H[close()]
|
||||
|
||||
E --> I[connectedClients Set]
|
||||
E --> J[WebSocketWriter]
|
||||
F --> K[ptySessionsMap]
|
||||
G --> L[Upstream Plugin ws://127.0.0.1:port/ws]
|
||||
|
||||
I --> M[projects.service broadcastProgress]
|
||||
I --> N[sessions-watcher.service projects_updated]
|
||||
```
|
||||
|
||||
## Connection Handshake + Routing
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
participant Client
|
||||
participant WSS as WebSocketServer
|
||||
participant Auth as verifyWebSocketClient
|
||||
participant Router as connection router
|
||||
participant Chat as /ws handler
|
||||
participant Shell as /shell handler
|
||||
participant Proxy as /plugin-ws handler
|
||||
|
||||
Client->>WSS: Upgrade Request
|
||||
WSS->>Auth: verifyClient(info)
|
||||
alt Platform mode
|
||||
Auth->>Auth: authenticateWebSocket(null)
|
||||
Auth->>Auth: attach request.user
|
||||
else OSS mode
|
||||
Auth->>Auth: read token from ?token or Authorization
|
||||
Auth->>Auth: authenticateWebSocket(token)
|
||||
Auth->>Auth: attach request.user
|
||||
end
|
||||
|
||||
alt Auth failed
|
||||
Auth-->>WSS: false (reject handshake)
|
||||
else Auth ok
|
||||
Auth-->>WSS: true
|
||||
WSS->>Router: on("connection", ws, request)
|
||||
alt pathname == /ws
|
||||
Router->>Chat: handleChatConnection(ws, request, deps.chat)
|
||||
else pathname == /shell
|
||||
Router->>Shell: handleShellConnection(ws, deps.shell)
|
||||
else pathname startsWith /plugin-ws/
|
||||
Router->>Proxy: handlePluginWsProxy(ws, pathname, getPluginPort)
|
||||
else unknown
|
||||
Router->>Router: ws.close()
|
||||
end
|
||||
end
|
||||
```
|
||||
|
||||
## `/ws` Chat Flow
|
||||
|
||||
When a chat socket connects:
|
||||
|
||||
1. Add socket to `connectedClients`.
|
||||
2. Build `WebSocketWriter` (captures `userId` from authenticated request).
|
||||
3. Parse each incoming message with `parseIncomingJsonObject`.
|
||||
4. Dispatch by `data.type`.
|
||||
5. On close, remove socket from `connectedClients`.
|
||||
|
||||
### Chat Message Dispatch
|
||||
|
||||
```mermaid
|
||||
flowchart TD
|
||||
A[Incoming WS message] --> B[parseIncomingJsonObject]
|
||||
B -->|invalid| C[send {type:error}]
|
||||
B -->|ok| D{data.type}
|
||||
|
||||
D -->|claude-command| E[queryClaudeSDK]
|
||||
D -->|cursor-command| F[spawnCursor]
|
||||
D -->|codex-command| G[queryCodex]
|
||||
D -->|gemini-command| H[spawnGemini]
|
||||
D -->|cursor-resume| I[spawnCursor resume]
|
||||
D -->|abort-session| J[abort by provider]
|
||||
D -->|claude-permission-response| K[resolveToolApproval]
|
||||
D -->|cursor-abort| L[abortCursorSession]
|
||||
D -->|check-session-status| M[is*SessionActive + optional reconnectSessionWriter]
|
||||
D -->|get-pending-permissions| N[getPendingApprovalsForSession]
|
||||
D -->|get-active-sessions| O[getActive*Sessions]
|
||||
```
|
||||
|
||||
### Chat Notes
|
||||
|
||||
1. `abort-session` returns a normalized `complete` message with `aborted: true`.
|
||||
2. `check-session-status` returns `{ type: "session-status", isProcessing }`.
|
||||
3. Claude status checks can reconnect output stream to the new socket via `reconnectSessionWriter`.
|
||||
|
||||
## `/shell` Terminal Flow
|
||||
|
||||
The shell handler manages persistent PTY sessions keyed by:
|
||||
|
||||
`<projectPath>_<sessionIdOrDefault>[_cmd_<hash>]`
|
||||
|
||||
This enables reconnect behavior and isolates command-specific plain-shell sessions.
|
||||
|
||||
### Shell Lifecycle
|
||||
|
||||
```mermaid
|
||||
stateDiagram-v2
|
||||
[*] --> WaitingInit
|
||||
WaitingInit --> ValidateInit: message.type == init
|
||||
ValidateInit --> ReconnectExisting: session key exists and not login reset
|
||||
ValidateInit --> SpawnNewPTY: valid path + valid sessionId
|
||||
ValidateInit --> EmitError: invalid payload/path/sessionId
|
||||
|
||||
ReconnectExisting --> Running: attach ws, replay buffer
|
||||
SpawnNewPTY --> Running: pty.spawn + wire onData/onExit
|
||||
|
||||
Running --> Running: input -> pty.write
|
||||
Running --> Running: resize -> pty.resize
|
||||
Running --> Running: onData -> buffer + output + auth_url detection
|
||||
Running --> Exited: onExit
|
||||
Running --> Detached: ws close
|
||||
|
||||
Detached --> Running: reconnect before timeout
|
||||
Detached --> Killed: timeout reached -> pty.kill
|
||||
Exited --> [*]
|
||||
Killed --> [*]
|
||||
EmitError --> WaitingInit
|
||||
```
|
||||
|
||||
### Shell Behaviors in Detail
|
||||
|
||||
1. `init`:
|
||||
Reads `projectPath`, `sessionId`, `provider`, `hasSession`, `initialCommand`, `isPlainShell`.
|
||||
2. Login reset:
|
||||
For login-like commands, existing keyed PTY session is killed and recreated.
|
||||
3. Validation:
|
||||
Path must exist and be a directory; `sessionId` must match safe pattern.
|
||||
4. Command build:
|
||||
Provider-specific command construction with resume semantics.
|
||||
5. PTY output buffering:
|
||||
Stores up to 5000 chunks for replay on reconnect.
|
||||
6. URL detection:
|
||||
Strips ANSI, accumulates text buffer, extracts URLs, emits `auth_url` once per normalized URL, supports `autoOpen`.
|
||||
7. Close behavior:
|
||||
Socket disconnect does not instantly kill PTY; session is kept alive and terminated on timeout.
|
||||
|
||||
## `/plugin-ws/:pluginName` Proxy Flow
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
participant Client
|
||||
participant Proxy as handlePluginWsProxy
|
||||
participant PM as getPluginPort
|
||||
participant Upstream as Plugin WS
|
||||
|
||||
Client->>Proxy: Connect /plugin-ws/:name
|
||||
Proxy->>Proxy: Validate pluginName regex
|
||||
alt Invalid name
|
||||
Proxy-->>Client: close(4400, "Invalid plugin name")
|
||||
else Valid
|
||||
Proxy->>PM: getPluginPort(name)
|
||||
alt Plugin not running
|
||||
Proxy-->>Client: close(4404, "Plugin not running")
|
||||
else Port found
|
||||
Proxy->>Upstream: new WebSocket(ws://127.0.0.1:port/ws)
|
||||
Client-->>Upstream: relay messages bidirectionally
|
||||
Upstream-->>Client: relay messages bidirectionally
|
||||
Upstream-->>Client: close propagation
|
||||
Client-->>Upstream: close propagation
|
||||
Upstream-->>Client: close(4502, "Upstream error") on upstream error
|
||||
end
|
||||
end
|
||||
```
|
||||
|
||||
## Shared Client Registry and Broadcasts
|
||||
|
||||
Only chat sockets (`/ws`) are tracked in `connectedClients`.
|
||||
|
||||
That shared set is consumed by:
|
||||
|
||||
1. `modules/projects/services/projects-with-sessions-fetch.service.ts`
|
||||
Broadcasts `loading_progress` while project snapshots are being built.
|
||||
2. `modules/providers/services/sessions-watcher.service.ts`
|
||||
Broadcasts `projects_updated` when provider session artifacts change.
|
||||
|
||||
This design centralizes cross-module realtime fanout without requiring route-local references to WebSocket internals.
|
||||
|
||||
## Writer Adapter (`WebSocketWriter`)
|
||||
|
||||
`WebSocketWriter` normalizes chat transport behavior to match existing writer-style interfaces used elsewhere.
|
||||
|
||||
Methods:
|
||||
|
||||
1. `send(data)`
|
||||
JSON-serializes and sends only if socket is open.
|
||||
2. `setSessionId(sessionId)` / `getSessionId()`
|
||||
Supports provider session bookkeeping and resume flows.
|
||||
3. `updateWebSocket(newRawWs)`
|
||||
Allows active session stream redirection on reconnect.
|
||||
|
||||
## Error Handling and Close Codes
|
||||
|
||||
Current explicit close codes in this module:
|
||||
|
||||
1. `4400`: Invalid plugin name
|
||||
2. `4404`: Plugin not running
|
||||
3. `4502`: Upstream plugin WebSocket error
|
||||
|
||||
Other errors:
|
||||
|
||||
1. Chat handler catches and emits `{ type: "error", error }`.
|
||||
2. Shell handler catches and writes terminal-visible error output.
|
||||
3. Unknown websocket paths are closed immediately.
|
||||
|
||||
## Extending This Module
|
||||
|
||||
To add a new websocket route:
|
||||
|
||||
1. Add a new handler service under `services/`.
|
||||
2. Extend `WebSocketServerDependencies` in `websocket-server.service.ts` if needed.
|
||||
3. Add a new pathname branch in the router.
|
||||
4. Wire dependency injection from `server/index.js`.
|
||||
5. Keep `index.ts` as barrel-only export surface.
|
||||
2
server/modules/websocket/index.ts
Normal file
2
server/modules/websocket/index.ts
Normal file
@@ -0,0 +1,2 @@
|
||||
export { WS_OPEN_STATE, connectedClients } from './services/websocket-state.service.js';
|
||||
export { createWebSocketServer } from './services/websocket-server.service.js';
|
||||
271
server/modules/websocket/services/chat-websocket.service.ts
Normal file
271
server/modules/websocket/services/chat-websocket.service.ts
Normal file
@@ -0,0 +1,271 @@
|
||||
import type { WebSocket } from 'ws';
|
||||
|
||||
import { connectedClients } from '@/modules/websocket/services/websocket-state.service.js';
|
||||
import { WebSocketWriter } from '@/modules/websocket/services/websocket-writer.service.js';
|
||||
import type {
|
||||
AnyRecord,
|
||||
AuthenticatedWebSocketRequest,
|
||||
LLMProvider,
|
||||
} from '@/shared/types.js';
|
||||
import { createNormalizedMessage, parseIncomingJsonObject } from '@/shared/utils.js';
|
||||
|
||||
type ChatIncomingMessage = AnyRecord & {
|
||||
type?: string;
|
||||
command?: string;
|
||||
options?: AnyRecord;
|
||||
provider?: string;
|
||||
sessionId?: string;
|
||||
requestId?: string;
|
||||
allow?: unknown;
|
||||
updatedInput?: unknown;
|
||||
message?: unknown;
|
||||
rememberEntry?: unknown;
|
||||
};
|
||||
|
||||
const DEFAULT_PROVIDER: LLMProvider = 'claude';
|
||||
|
||||
type ChatWebSocketDependencies = {
|
||||
queryClaudeSDK: (command: string, options: unknown, writer: WebSocketWriter) => Promise<unknown>;
|
||||
spawnCursor: (command: string, options: unknown, writer: WebSocketWriter) => Promise<unknown>;
|
||||
queryCodex: (command: string, options: unknown, writer: WebSocketWriter) => Promise<unknown>;
|
||||
spawnGemini: (command: string, options: unknown, writer: WebSocketWriter) => Promise<unknown>;
|
||||
abortClaudeSDKSession: (sessionId: string) => Promise<boolean>;
|
||||
abortCursorSession: (sessionId: string) => boolean;
|
||||
abortCodexSession: (sessionId: string) => boolean;
|
||||
abortGeminiSession: (sessionId: string) => boolean;
|
||||
resolveToolApproval: (
|
||||
requestId: string,
|
||||
payload: {
|
||||
allow: boolean;
|
||||
updatedInput?: unknown;
|
||||
message?: string;
|
||||
rememberEntry?: unknown;
|
||||
}
|
||||
) => void;
|
||||
isClaudeSDKSessionActive: (sessionId: string) => boolean;
|
||||
isCursorSessionActive: (sessionId: string) => boolean;
|
||||
isCodexSessionActive: (sessionId: string) => boolean;
|
||||
isGeminiSessionActive: (sessionId: string) => boolean;
|
||||
reconnectSessionWriter: (sessionId: string, ws: WebSocket) => boolean;
|
||||
getPendingApprovalsForSession: (sessionId: string) => unknown[];
|
||||
getActiveClaudeSDKSessions: () => unknown;
|
||||
getActiveCursorSessions: () => unknown;
|
||||
getActiveCodexSessions: () => unknown;
|
||||
getActiveGeminiSessions: () => unknown;
|
||||
};
|
||||
|
||||
/**
|
||||
* Normalizes potentially invalid provider names coming from websocket payloads.
|
||||
*/
|
||||
function readProvider(value: unknown): LLMProvider {
|
||||
if (value === 'claude' || value === 'cursor' || value === 'codex' || value === 'gemini') {
|
||||
return value;
|
||||
}
|
||||
|
||||
return DEFAULT_PROVIDER;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts the authenticated request user id in the formats currently produced
|
||||
* by platform and OSS auth code paths.
|
||||
*/
|
||||
function readRequestUserId(
|
||||
request: AuthenticatedWebSocketRequest | undefined
|
||||
): string | number | null {
|
||||
const user = request?.user;
|
||||
if (!user) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (typeof user.id === 'string' || typeof user.id === 'number') {
|
||||
return user.id;
|
||||
}
|
||||
|
||||
if (typeof user.userId === 'string' || typeof user.userId === 'number') {
|
||||
return user.userId;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Handles authenticated chat websocket messages used by the main chat panel.
|
||||
*/
|
||||
export function handleChatConnection(
|
||||
ws: WebSocket,
|
||||
request: AuthenticatedWebSocketRequest,
|
||||
dependencies: ChatWebSocketDependencies
|
||||
): void {
|
||||
console.log('[INFO] Chat WebSocket connected');
|
||||
connectedClients.add(ws);
|
||||
|
||||
const writer = new WebSocketWriter(ws, readRequestUserId(request));
|
||||
|
||||
ws.on('message', async (rawMessage) => {
|
||||
try {
|
||||
const parsed = parseIncomingJsonObject(rawMessage);
|
||||
if (!parsed) {
|
||||
throw new Error('Invalid websocket payload');
|
||||
}
|
||||
|
||||
const data = parsed as ChatIncomingMessage;
|
||||
const messageType = data.type;
|
||||
if (!messageType) {
|
||||
throw new Error('Message type is required');
|
||||
}
|
||||
|
||||
if (messageType === 'claude-command') {
|
||||
await dependencies.queryClaudeSDK(data.command ?? '', data.options, writer);
|
||||
return;
|
||||
}
|
||||
|
||||
if (messageType === 'cursor-command') {
|
||||
await dependencies.spawnCursor(data.command ?? '', data.options, writer);
|
||||
return;
|
||||
}
|
||||
|
||||
if (messageType === 'codex-command') {
|
||||
await dependencies.queryCodex(data.command ?? '', data.options, writer);
|
||||
return;
|
||||
}
|
||||
|
||||
if (messageType === 'gemini-command') {
|
||||
await dependencies.spawnGemini(data.command ?? '', data.options, writer);
|
||||
return;
|
||||
}
|
||||
|
||||
if (messageType === 'cursor-resume') {
|
||||
await dependencies.spawnCursor(
|
||||
'',
|
||||
{
|
||||
sessionId: data.sessionId,
|
||||
resume: true,
|
||||
cwd: data.options?.cwd,
|
||||
},
|
||||
writer
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (messageType === 'abort-session') {
|
||||
const provider = readProvider(data.provider);
|
||||
const sessionId = typeof data.sessionId === 'string' ? data.sessionId : '';
|
||||
let success = false;
|
||||
|
||||
if (provider === 'cursor') {
|
||||
success = dependencies.abortCursorSession(sessionId);
|
||||
} else if (provider === 'codex') {
|
||||
success = dependencies.abortCodexSession(sessionId);
|
||||
} else if (provider === 'gemini') {
|
||||
success = dependencies.abortGeminiSession(sessionId);
|
||||
} else {
|
||||
success = await dependencies.abortClaudeSDKSession(sessionId);
|
||||
}
|
||||
|
||||
writer.send(
|
||||
createNormalizedMessage({
|
||||
kind: 'complete',
|
||||
exitCode: success ? 0 : 1,
|
||||
aborted: true,
|
||||
success,
|
||||
sessionId,
|
||||
provider,
|
||||
})
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (messageType === 'claude-permission-response') {
|
||||
if (typeof data.requestId === 'string' && data.requestId.length > 0) {
|
||||
dependencies.resolveToolApproval(data.requestId, {
|
||||
allow: Boolean(data.allow),
|
||||
updatedInput: data.updatedInput,
|
||||
message: typeof data.message === 'string' ? data.message : undefined,
|
||||
rememberEntry: data.rememberEntry,
|
||||
});
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (messageType === 'cursor-abort') {
|
||||
const sessionId = typeof data.sessionId === 'string' ? data.sessionId : '';
|
||||
const success = dependencies.abortCursorSession(sessionId);
|
||||
writer.send(
|
||||
createNormalizedMessage({
|
||||
kind: 'complete',
|
||||
exitCode: success ? 0 : 1,
|
||||
aborted: true,
|
||||
success,
|
||||
sessionId,
|
||||
provider: 'cursor',
|
||||
})
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (messageType === 'check-session-status') {
|
||||
const provider = readProvider(data.provider);
|
||||
const sessionId = typeof data.sessionId === 'string' ? data.sessionId : '';
|
||||
let isActive = false;
|
||||
|
||||
if (provider === 'cursor') {
|
||||
isActive = dependencies.isCursorSessionActive(sessionId);
|
||||
} else if (provider === 'codex') {
|
||||
isActive = dependencies.isCodexSessionActive(sessionId);
|
||||
} else if (provider === 'gemini') {
|
||||
isActive = dependencies.isGeminiSessionActive(sessionId);
|
||||
} else {
|
||||
isActive = dependencies.isClaudeSDKSessionActive(sessionId);
|
||||
if (isActive) {
|
||||
dependencies.reconnectSessionWriter(sessionId, ws);
|
||||
}
|
||||
}
|
||||
|
||||
writer.send({
|
||||
type: 'session-status',
|
||||
sessionId,
|
||||
provider,
|
||||
isProcessing: isActive,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
if (messageType === 'get-pending-permissions') {
|
||||
const sessionId = typeof data.sessionId === 'string' ? data.sessionId : '';
|
||||
if (sessionId && dependencies.isClaudeSDKSessionActive(sessionId)) {
|
||||
const pending = dependencies.getPendingApprovalsForSession(sessionId);
|
||||
writer.send({
|
||||
type: 'pending-permissions-response',
|
||||
sessionId,
|
||||
data: pending,
|
||||
});
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (messageType === 'get-active-sessions') {
|
||||
writer.send({
|
||||
type: 'active-sessions',
|
||||
sessions: {
|
||||
claude: dependencies.getActiveClaudeSDKSessions(),
|
||||
cursor: dependencies.getActiveCursorSessions(),
|
||||
codex: dependencies.getActiveCodexSessions(),
|
||||
gemini: dependencies.getActiveGeminiSessions(),
|
||||
},
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
console.error('[ERROR] Chat WebSocket error:', message);
|
||||
writer.send({
|
||||
type: 'error',
|
||||
error: message,
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
ws.on('close', () => {
|
||||
console.log('[INFO] Chat client disconnected');
|
||||
connectedClients.delete(ws);
|
||||
});
|
||||
}
|
||||
@@ -0,0 +1,65 @@
|
||||
import { WebSocket } from 'ws';
|
||||
|
||||
/**
|
||||
* Proxies an authenticated client websocket to a plugin websocket endpoint.
|
||||
*/
|
||||
export function handlePluginWsProxy(
|
||||
clientWs: WebSocket,
|
||||
pathname: string,
|
||||
getPluginPort: (pluginName: string) => number | null
|
||||
): void {
|
||||
const pluginName = pathname.replace('/plugin-ws/', '');
|
||||
if (!pluginName || /[^a-zA-Z0-9_-]/.test(pluginName)) {
|
||||
clientWs.close(4400, 'Invalid plugin name');
|
||||
return;
|
||||
}
|
||||
|
||||
const port = getPluginPort(pluginName);
|
||||
if (!port) {
|
||||
clientWs.close(4404, 'Plugin not running');
|
||||
return;
|
||||
}
|
||||
|
||||
const upstream = new WebSocket(`ws://127.0.0.1:${port}/ws`);
|
||||
|
||||
upstream.on('open', () => {
|
||||
console.log(`[Plugins] WS proxy connected to "${pluginName}" on port ${port}`);
|
||||
});
|
||||
|
||||
upstream.on('message', (data) => {
|
||||
if (clientWs.readyState === WebSocket.OPEN) {
|
||||
clientWs.send(data);
|
||||
}
|
||||
});
|
||||
|
||||
clientWs.on('message', (data) => {
|
||||
if (upstream.readyState === WebSocket.OPEN) {
|
||||
upstream.send(data);
|
||||
}
|
||||
});
|
||||
|
||||
upstream.on('close', () => {
|
||||
if (clientWs.readyState === WebSocket.OPEN) {
|
||||
clientWs.close();
|
||||
}
|
||||
});
|
||||
|
||||
clientWs.on('close', () => {
|
||||
if (upstream.readyState === WebSocket.OPEN) {
|
||||
upstream.close();
|
||||
}
|
||||
});
|
||||
|
||||
upstream.on('error', (error) => {
|
||||
console.error(`[Plugins] WS proxy error for "${pluginName}":`, error.message);
|
||||
if (clientWs.readyState === WebSocket.OPEN) {
|
||||
clientWs.close(4502, 'Upstream error');
|
||||
}
|
||||
});
|
||||
|
||||
clientWs.on('error', () => {
|
||||
if (upstream.readyState === WebSocket.OPEN) {
|
||||
upstream.close();
|
||||
}
|
||||
});
|
||||
}
|
||||
453
server/modules/websocket/services/shell-websocket.service.ts
Normal file
453
server/modules/websocket/services/shell-websocket.service.ts
Normal file
@@ -0,0 +1,453 @@
|
||||
import fs from 'node:fs';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
|
||||
import pty, { type IPty } from 'node-pty';
|
||||
import { WebSocket, type RawData } from 'ws';
|
||||
|
||||
import { parseIncomingJsonObject } from '@/shared/utils.js';
|
||||
|
||||
type ShellIncomingMessage = {
|
||||
type?: string;
|
||||
data?: string;
|
||||
cols?: number;
|
||||
rows?: number;
|
||||
projectPath?: string;
|
||||
sessionId?: string;
|
||||
hasSession?: boolean;
|
||||
provider?: string;
|
||||
initialCommand?: string;
|
||||
isPlainShell?: boolean;
|
||||
};
|
||||
|
||||
type PtySessionEntry = {
|
||||
pty: IPty;
|
||||
ws: WebSocket | null;
|
||||
buffer: string[];
|
||||
timeoutId: NodeJS.Timeout | null;
|
||||
projectPath: string;
|
||||
sessionId: string | null;
|
||||
};
|
||||
|
||||
const ptySessionsMap = new Map<string, PtySessionEntry>();
|
||||
const PTY_SESSION_TIMEOUT = 30 * 60 * 1000;
|
||||
const SHELL_URL_PARSE_BUFFER_LIMIT = 32768;
|
||||
|
||||
type ShellWebSocketDependencies = {
|
||||
getSessionById: (sessionId: string) => { cliSessionId?: string } | null | undefined;
|
||||
stripAnsiSequences: (content: string) => string;
|
||||
normalizeDetectedUrl: (url: string) => string | null;
|
||||
extractUrlsFromText: (content: string) => string[];
|
||||
shouldAutoOpenUrlFromOutput: (content: string) => boolean;
|
||||
};
|
||||
|
||||
/**
|
||||
* Reads a string field from untyped payloads and falls back when absent.
|
||||
*/
|
||||
function readString(value: unknown, fallback = ''): string {
|
||||
return typeof value === 'string' ? value : fallback;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads a boolean field from untyped payloads and falls back when absent.
|
||||
*/
|
||||
function readBoolean(value: unknown, fallback = false): boolean {
|
||||
return typeof value === 'boolean' ? value : fallback;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads a finite number field from untyped payloads and falls back when absent.
|
||||
*/
|
||||
function readNumber(value: unknown, fallback: number): number {
|
||||
return typeof value === 'number' && Number.isFinite(value) ? value : fallback;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses incoming websocket shell messages and keeps processing safe when
|
||||
* malformed payloads are received.
|
||||
*/
|
||||
function parseShellMessage(rawMessage: RawData): ShellIncomingMessage | null {
|
||||
const payload = parseIncomingJsonObject(rawMessage);
|
||||
if (!payload) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return payload as ShellIncomingMessage;
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves provider command line for plain shell and agent-backed shell modes.
|
||||
*/
|
||||
function buildShellCommand(
|
||||
message: ShellIncomingMessage,
|
||||
dependencies: ShellWebSocketDependencies
|
||||
): string {
|
||||
const hasSession = readBoolean(message.hasSession);
|
||||
const sessionId = readString(message.sessionId);
|
||||
const initialCommand = readString(message.initialCommand);
|
||||
const provider = readString(message.provider, 'claude');
|
||||
const safeSessionIdPattern = /^[a-zA-Z0-9_.\-:]+$/;
|
||||
const isPlainShell =
|
||||
readBoolean(message.isPlainShell) ||
|
||||
(!!initialCommand && !hasSession) ||
|
||||
provider === 'plain-shell';
|
||||
|
||||
if (isPlainShell) {
|
||||
return initialCommand;
|
||||
}
|
||||
|
||||
if (provider === 'cursor') {
|
||||
if (hasSession && sessionId) {
|
||||
return `cursor-agent --resume="${sessionId}"`;
|
||||
}
|
||||
return 'cursor-agent';
|
||||
}
|
||||
|
||||
if (provider === 'codex') {
|
||||
if (hasSession && sessionId) {
|
||||
if (os.platform() === 'win32') {
|
||||
return `codex resume "${sessionId}"; if ($LASTEXITCODE -ne 0) { codex }`;
|
||||
}
|
||||
return `codex resume "${sessionId}" || codex`;
|
||||
}
|
||||
return 'codex';
|
||||
}
|
||||
|
||||
if (provider === 'gemini') {
|
||||
const command = initialCommand || 'gemini';
|
||||
let resumeId = sessionId;
|
||||
if (hasSession && sessionId) {
|
||||
try {
|
||||
const existingSession = dependencies.getSessionById(sessionId);
|
||||
if (existingSession && existingSession.cliSessionId) {
|
||||
resumeId = existingSession.cliSessionId;
|
||||
if (!safeSessionIdPattern.test(resumeId)) {
|
||||
resumeId = '';
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to get Gemini CLI session ID:', error);
|
||||
}
|
||||
}
|
||||
|
||||
if (hasSession && resumeId) {
|
||||
return `${command} --resume "${resumeId}"`;
|
||||
}
|
||||
return command;
|
||||
}
|
||||
|
||||
const command = initialCommand || 'claude';
|
||||
if (hasSession && sessionId) {
|
||||
if (os.platform() === 'win32') {
|
||||
return `claude --resume "${sessionId}"; if ($LASTEXITCODE -ne 0) { claude }`;
|
||||
}
|
||||
return `claude --resume "${sessionId}" || claude`;
|
||||
}
|
||||
return command;
|
||||
}
|
||||
|
||||
/**
|
||||
* Handles websocket connections used by the standalone shell terminal UI.
|
||||
*/
|
||||
export function handleShellConnection(
|
||||
ws: WebSocket,
|
||||
dependencies: ShellWebSocketDependencies
|
||||
): void {
|
||||
console.log('[INFO] Shell websocket connected');
|
||||
|
||||
let shellProcess: IPty | null = null;
|
||||
let ptySessionKey: string | null = null;
|
||||
let urlDetectionBuffer = '';
|
||||
const announcedAuthUrls = new Set<string>();
|
||||
|
||||
ws.on('message', async (rawMessage) => {
|
||||
try {
|
||||
const data = parseShellMessage(rawMessage);
|
||||
if (!data?.type) {
|
||||
throw new Error('Invalid websocket payload');
|
||||
}
|
||||
|
||||
if (data.type === 'init') {
|
||||
const projectPath = readString(data.projectPath, process.cwd());
|
||||
const sessionId = readString(data.sessionId) || null;
|
||||
const hasSession = readBoolean(data.hasSession);
|
||||
const provider = readString(data.provider, 'claude');
|
||||
const initialCommand = readString(data.initialCommand);
|
||||
const isPlainShell =
|
||||
readBoolean(data.isPlainShell) ||
|
||||
(!!initialCommand && !hasSession) ||
|
||||
provider === 'plain-shell';
|
||||
|
||||
urlDetectionBuffer = '';
|
||||
announcedAuthUrls.clear();
|
||||
|
||||
const isLoginCommand =
|
||||
!!initialCommand &&
|
||||
(initialCommand.includes('setup-token') ||
|
||||
initialCommand.includes('cursor-agent login') ||
|
||||
initialCommand.includes('auth login'));
|
||||
|
||||
const commandSuffix =
|
||||
isPlainShell && initialCommand
|
||||
? `_cmd_${Buffer.from(initialCommand).toString('base64').slice(0, 16)}`
|
||||
: '';
|
||||
ptySessionKey = `${projectPath}_${sessionId ?? 'default'}${commandSuffix}`;
|
||||
|
||||
if (isLoginCommand) {
|
||||
const oldSession = ptySessionsMap.get(ptySessionKey);
|
||||
if (oldSession) {
|
||||
if (oldSession.timeoutId) {
|
||||
clearTimeout(oldSession.timeoutId);
|
||||
}
|
||||
oldSession.pty.kill();
|
||||
ptySessionsMap.delete(ptySessionKey);
|
||||
}
|
||||
}
|
||||
|
||||
const existingSession = isLoginCommand ? null : ptySessionsMap.get(ptySessionKey);
|
||||
if (existingSession) {
|
||||
shellProcess = existingSession.pty;
|
||||
if (existingSession.timeoutId) {
|
||||
clearTimeout(existingSession.timeoutId);
|
||||
}
|
||||
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: 'output',
|
||||
data: '\x1b[36m[Reconnected to existing session]\x1b[0m\r\n',
|
||||
})
|
||||
);
|
||||
|
||||
if (existingSession.buffer.length > 0) {
|
||||
existingSession.buffer.forEach((bufferedData) => {
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: 'output',
|
||||
data: bufferedData,
|
||||
})
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
existingSession.ws = ws;
|
||||
return;
|
||||
}
|
||||
|
||||
const resolvedProjectPath = path.resolve(projectPath);
|
||||
try {
|
||||
const stats = fs.statSync(resolvedProjectPath);
|
||||
if (!stats.isDirectory()) {
|
||||
throw new Error('Not a directory');
|
||||
}
|
||||
} catch {
|
||||
ws.send(JSON.stringify({ type: 'error', message: 'Invalid project path' }));
|
||||
return;
|
||||
}
|
||||
|
||||
const safeSessionIdPattern = /^[a-zA-Z0-9_.\-:]+$/;
|
||||
if (sessionId && !safeSessionIdPattern.test(sessionId)) {
|
||||
ws.send(JSON.stringify({ type: 'error', message: 'Invalid session ID' }));
|
||||
return;
|
||||
}
|
||||
|
||||
const shellCommand = buildShellCommand(data, dependencies);
|
||||
const shell = os.platform() === 'win32' ? 'powershell.exe' : 'bash';
|
||||
const shellArgs =
|
||||
os.platform() === 'win32' ? ['-Command', shellCommand] : ['-c', shellCommand];
|
||||
const termCols = readNumber(data.cols, 80);
|
||||
const termRows = readNumber(data.rows, 24);
|
||||
|
||||
shellProcess = pty.spawn(shell, shellArgs, {
|
||||
name: 'xterm-256color',
|
||||
cols: termCols,
|
||||
rows: termRows,
|
||||
cwd: resolvedProjectPath,
|
||||
env: {
|
||||
...process.env,
|
||||
TERM: 'xterm-256color',
|
||||
COLORTERM: 'truecolor',
|
||||
FORCE_COLOR: '3',
|
||||
},
|
||||
});
|
||||
|
||||
ptySessionsMap.set(ptySessionKey, {
|
||||
pty: shellProcess,
|
||||
ws,
|
||||
buffer: [],
|
||||
timeoutId: null,
|
||||
projectPath,
|
||||
sessionId,
|
||||
});
|
||||
|
||||
shellProcess.onData((chunk) => {
|
||||
if (!ptySessionKey) {
|
||||
return;
|
||||
}
|
||||
|
||||
const session = ptySessionsMap.get(ptySessionKey);
|
||||
if (!session) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (session.buffer.length < 5000) {
|
||||
session.buffer.push(chunk);
|
||||
} else {
|
||||
session.buffer.shift();
|
||||
session.buffer.push(chunk);
|
||||
}
|
||||
|
||||
if (session.ws && session.ws.readyState === WebSocket.OPEN) {
|
||||
let outputData = chunk;
|
||||
const cleanChunk = dependencies.stripAnsiSequences(chunk);
|
||||
urlDetectionBuffer = `${urlDetectionBuffer}${cleanChunk}`.slice(-SHELL_URL_PARSE_BUFFER_LIMIT);
|
||||
|
||||
outputData = outputData.replace(
|
||||
/OPEN_URL:\s*(https?:\/\/[^\s\x1b\x07]+)/g,
|
||||
'[INFO] Opening in browser: $1'
|
||||
);
|
||||
|
||||
const emitAuthUrl = (detectedUrl: string, autoOpen = false) => {
|
||||
const normalizedUrl = dependencies.normalizeDetectedUrl(detectedUrl);
|
||||
if (!normalizedUrl) {
|
||||
return;
|
||||
}
|
||||
|
||||
const isNewUrl = !announcedAuthUrls.has(normalizedUrl);
|
||||
if (isNewUrl) {
|
||||
announcedAuthUrls.add(normalizedUrl);
|
||||
session.ws?.send(
|
||||
JSON.stringify({
|
||||
type: 'auth_url',
|
||||
url: normalizedUrl,
|
||||
autoOpen,
|
||||
})
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
const normalizedDetectedUrls = dependencies.extractUrlsFromText(urlDetectionBuffer)
|
||||
.map((url) => dependencies.normalizeDetectedUrl(url))
|
||||
.filter((url): url is string => Boolean(url));
|
||||
|
||||
const dedupedDetectedUrls = Array.from(new Set(normalizedDetectedUrls)).filter(
|
||||
(url, _, urls) =>
|
||||
!urls.some((otherUrl) => otherUrl !== url && otherUrl.startsWith(url))
|
||||
);
|
||||
|
||||
dedupedDetectedUrls.forEach((url) => emitAuthUrl(url, false));
|
||||
|
||||
if (
|
||||
dependencies.shouldAutoOpenUrlFromOutput(cleanChunk) &&
|
||||
dedupedDetectedUrls.length > 0
|
||||
) {
|
||||
const bestUrl = dedupedDetectedUrls.reduce((longest, current) =>
|
||||
current.length > longest.length ? current : longest
|
||||
);
|
||||
emitAuthUrl(bestUrl, true);
|
||||
}
|
||||
|
||||
session.ws.send(
|
||||
JSON.stringify({
|
||||
type: 'output',
|
||||
data: outputData,
|
||||
})
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
shellProcess.onExit((exitCode) => {
|
||||
if (!ptySessionKey) {
|
||||
return;
|
||||
}
|
||||
|
||||
const session = ptySessionsMap.get(ptySessionKey);
|
||||
if (session && session.ws && session.ws.readyState === WebSocket.OPEN) {
|
||||
session.ws.send(
|
||||
JSON.stringify({
|
||||
type: 'output',
|
||||
data: `\r\n\x1b[33mProcess exited with code ${exitCode.exitCode}${
|
||||
exitCode.signal != null ? ` (${exitCode.signal})` : ''
|
||||
}\x1b[0m\r\n`,
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
if (session?.timeoutId) {
|
||||
clearTimeout(session.timeoutId);
|
||||
}
|
||||
|
||||
ptySessionsMap.delete(ptySessionKey);
|
||||
shellProcess = null;
|
||||
});
|
||||
|
||||
let welcomeMsg = `\x1b[36mStarting terminal in: ${projectPath}\x1b[0m\r\n`;
|
||||
if (!isPlainShell) {
|
||||
const providerName =
|
||||
provider === 'cursor'
|
||||
? 'Cursor'
|
||||
: provider === 'codex'
|
||||
? 'Codex'
|
||||
: provider === 'gemini'
|
||||
? 'Gemini'
|
||||
: 'Claude';
|
||||
welcomeMsg = hasSession
|
||||
? `\x1b[36mResuming ${providerName} session ${sessionId} in: ${projectPath}\x1b[0m\r\n`
|
||||
: `\x1b[36mStarting new ${providerName} session in: ${projectPath}\x1b[0m\r\n`;
|
||||
}
|
||||
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: 'output',
|
||||
data: welcomeMsg,
|
||||
})
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (data.type === 'input') {
|
||||
if (shellProcess) {
|
||||
shellProcess.write(readString(data.data));
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (data.type === 'resize') {
|
||||
if (shellProcess) {
|
||||
shellProcess.resize(readNumber(data.cols, 80), readNumber(data.rows, 24));
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
console.error('[ERROR] Shell WebSocket error:', message);
|
||||
if (ws.readyState === WebSocket.OPEN) {
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: 'output',
|
||||
data: `\r\n\x1b[31mError: ${message}\x1b[0m\r\n`,
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
ws.on('close', () => {
|
||||
if (!ptySessionKey) {
|
||||
return;
|
||||
}
|
||||
|
||||
const session = ptySessionsMap.get(ptySessionKey);
|
||||
if (!session) {
|
||||
return;
|
||||
}
|
||||
|
||||
session.ws = null;
|
||||
session.timeoutId = setTimeout(() => {
|
||||
session.pty.kill();
|
||||
ptySessionsMap.delete(ptySessionKey as string);
|
||||
}, PTY_SESSION_TIMEOUT);
|
||||
});
|
||||
|
||||
ws.on('error', (error) => {
|
||||
console.error('[ERROR] Shell WebSocket error:', error);
|
||||
});
|
||||
}
|
||||
54
server/modules/websocket/services/websocket-auth.service.ts
Normal file
54
server/modules/websocket/services/websocket-auth.service.ts
Normal file
@@ -0,0 +1,54 @@
|
||||
import type { VerifyClientCallbackSync } from 'ws';
|
||||
|
||||
import type { AuthenticatedWebSocketRequest } from '@/shared/types.js';
|
||||
|
||||
type WebSocketAuthDependencies = {
|
||||
isPlatform: boolean;
|
||||
authenticateWebSocket: (token: string | null) => {
|
||||
id?: string | number;
|
||||
userId?: string | number;
|
||||
username?: string;
|
||||
[key: string]: unknown;
|
||||
} | null;
|
||||
};
|
||||
|
||||
/**
|
||||
* Authenticates websocket upgrade requests before the `connection` handler runs.
|
||||
*/
|
||||
export function verifyWebSocketClient(
|
||||
info: Parameters<VerifyClientCallbackSync<AuthenticatedWebSocketRequest>>[0],
|
||||
dependencies: WebSocketAuthDependencies
|
||||
): boolean {
|
||||
const request = info.req as AuthenticatedWebSocketRequest;
|
||||
console.log('WebSocket connection attempt to:', request.url);
|
||||
|
||||
// Platform mode: use the first DB user and skip token checks.
|
||||
if (dependencies.isPlatform) {
|
||||
const user = dependencies.authenticateWebSocket(null);
|
||||
if (!user) {
|
||||
console.log('[WARN] Platform mode: No user found in database');
|
||||
return false;
|
||||
}
|
||||
|
||||
request.user = user;
|
||||
console.log('[OK] Platform mode WebSocket authenticated for user:', user.username);
|
||||
return true;
|
||||
}
|
||||
|
||||
// OSS mode: read JWT from query string first, then Authorization header.
|
||||
const upgradeUrl = new URL(request.url ?? '/', 'http://localhost');
|
||||
const token =
|
||||
upgradeUrl.searchParams.get('token') ??
|
||||
request.headers.authorization?.split(' ')[1] ??
|
||||
null;
|
||||
|
||||
const user = dependencies.authenticateWebSocket(token);
|
||||
if (!user) {
|
||||
console.log('[WARN] WebSocket authentication failed');
|
||||
return false;
|
||||
}
|
||||
|
||||
request.user = user;
|
||||
console.log('[OK] WebSocket authenticated for user:', user.username);
|
||||
return true;
|
||||
}
|
||||
@@ -0,0 +1,58 @@
|
||||
import type { Server as HttpServer } from 'node:http';
|
||||
|
||||
import { WebSocketServer, type VerifyClientCallbackSync } from 'ws';
|
||||
|
||||
import { handleChatConnection } from '@/modules/websocket/services/chat-websocket.service.js';
|
||||
import { verifyWebSocketClient } from '@/modules/websocket/services/websocket-auth.service.js';
|
||||
import { handlePluginWsProxy } from '@/modules/websocket/services/plugin-websocket-proxy.service.js';
|
||||
import { handleShellConnection } from '@/modules/websocket/services/shell-websocket.service.js';
|
||||
import type { AuthenticatedWebSocketRequest } from '@/shared/types.js';
|
||||
|
||||
type WebSocketServerDependencies = {
|
||||
verifyClient: Parameters<typeof verifyWebSocketClient>[1];
|
||||
chat: Parameters<typeof handleChatConnection>[2];
|
||||
shell: Parameters<typeof handleShellConnection>[1];
|
||||
getPluginPort: Parameters<typeof handlePluginWsProxy>[2];
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates and wires the server-wide websocket gateway used for chat, shell, and
|
||||
* plugin proxy routes.
|
||||
*/
|
||||
export function createWebSocketServer(
|
||||
server: HttpServer,
|
||||
dependencies: WebSocketServerDependencies
|
||||
): WebSocketServer {
|
||||
const wss = new WebSocketServer({
|
||||
server,
|
||||
verifyClient: ((
|
||||
info: Parameters<VerifyClientCallbackSync<AuthenticatedWebSocketRequest>>[0]
|
||||
) => verifyWebSocketClient(info, dependencies.verifyClient)),
|
||||
});
|
||||
|
||||
wss.on('connection', (ws, request) => {
|
||||
const incomingRequest = request as AuthenticatedWebSocketRequest;
|
||||
const url = incomingRequest.url ?? '/';
|
||||
const pathname = new URL(url, 'http://localhost').pathname;
|
||||
|
||||
if (pathname === '/shell') {
|
||||
handleShellConnection(ws, dependencies.shell);
|
||||
return;
|
||||
}
|
||||
|
||||
if (pathname === '/ws') {
|
||||
handleChatConnection(ws, incomingRequest, dependencies.chat);
|
||||
return;
|
||||
}
|
||||
|
||||
if (pathname.startsWith('/plugin-ws/')) {
|
||||
handlePluginWsProxy(ws, pathname, dependencies.getPluginPort);
|
||||
return;
|
||||
}
|
||||
|
||||
console.log('[WARN] Unknown WebSocket path:', pathname);
|
||||
ws.close();
|
||||
});
|
||||
|
||||
return wss;
|
||||
}
|
||||
16
server/modules/websocket/services/websocket-state.service.ts
Normal file
16
server/modules/websocket/services/websocket-state.service.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
import type { RealtimeClientConnection } from '@/shared/types.js';
|
||||
|
||||
/**
|
||||
* Numeric readyState for an open WebSocket connection.
|
||||
*
|
||||
* We keep this in module state so services that broadcast updates do not need
|
||||
* to import `ws` directly just to compare open/closed state.
|
||||
*/
|
||||
export const WS_OPEN_STATE = 1;
|
||||
|
||||
/**
|
||||
* Shared registry of active chat WebSocket connections.
|
||||
*
|
||||
* Project/session services publish realtime updates by iterating this set.
|
||||
*/
|
||||
export const connectedClients = new Set<RealtimeClientConnection>();
|
||||
@@ -0,0 +1,38 @@
|
||||
import { WS_OPEN_STATE } from '@/modules/websocket/services/websocket-state.service.js';
|
||||
import type { RealtimeClientConnection } from '@/shared/types.js';
|
||||
|
||||
/**
|
||||
* Thin transport adapter that gives WebSocket connections the same interface as
|
||||
* SSE writers used by API routes (`send`, `setSessionId`, `getSessionId`).
|
||||
*/
|
||||
export class WebSocketWriter {
|
||||
ws: RealtimeClientConnection;
|
||||
sessionId: string | null;
|
||||
userId: string | number | null;
|
||||
isWebSocketWriter: boolean;
|
||||
|
||||
constructor(ws: RealtimeClientConnection, userId: string | number | null = null) {
|
||||
this.ws = ws;
|
||||
this.sessionId = null;
|
||||
this.userId = userId;
|
||||
this.isWebSocketWriter = true;
|
||||
}
|
||||
|
||||
send(data: unknown): void {
|
||||
if (this.ws.readyState === WS_OPEN_STATE) {
|
||||
this.ws.send(JSON.stringify(data));
|
||||
}
|
||||
}
|
||||
|
||||
updateWebSocket(newRawWs: RealtimeClientConnection): void {
|
||||
this.ws = newRawWs;
|
||||
}
|
||||
|
||||
setSessionId(sessionId: string): void {
|
||||
this.sessionId = sessionId;
|
||||
}
|
||||
|
||||
getSessionId(): string | null {
|
||||
return this.sessionId;
|
||||
}
|
||||
}
|
||||
2555
server/projects.js
2555
server/projects.js
File diff suppressed because it is too large
Load Diff
@@ -4,8 +4,7 @@ import path from 'path';
|
||||
import os from 'os';
|
||||
import { promises as fs } from 'fs';
|
||||
import crypto from 'crypto';
|
||||
import { userDb, apiKeysDb, githubTokensDb } from '../database/db.js';
|
||||
import { addProjectManually } from '../projects.js';
|
||||
import { userDb, apiKeysDb, githubTokensDb, projectsDb } from '../modules/database/index.js';
|
||||
import { queryClaudeSDK } from '../claude-sdk.js';
|
||||
import { spawnCursor } from '../cursor-cli.js';
|
||||
import { queryCodex } from '../openai-codex.js';
|
||||
@@ -13,6 +12,7 @@ import { spawnGemini } from '../gemini-cli.js';
|
||||
import { Octokit } from '@octokit/rest';
|
||||
import { CLAUDE_MODELS, CURSOR_MODELS, CODEX_MODELS } from '../../shared/modelConstants.js';
|
||||
import { IS_PLATFORM } from '../constants/config.js';
|
||||
import { normalizeProjectPath } from '../shared/utils.js';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
@@ -890,7 +890,7 @@ router.post('/', validateExternalApiKey, async (req, res) => {
|
||||
finalProjectPath = await cloneGitHubRepo(githubUrl.trim(), tokenToUse, targetPath);
|
||||
} else {
|
||||
// Use existing project path
|
||||
finalProjectPath = path.resolve(projectPath);
|
||||
finalProjectPath = normalizeProjectPath(path.resolve(projectPath));
|
||||
|
||||
// Verify the path exists
|
||||
try {
|
||||
@@ -900,19 +900,14 @@ router.post('/', validateExternalApiKey, async (req, res) => {
|
||||
}
|
||||
}
|
||||
|
||||
// Register the project (or use existing registration)
|
||||
let project;
|
||||
try {
|
||||
project = await addProjectManually(finalProjectPath);
|
||||
console.log('📦 Project registered:', project);
|
||||
} catch (error) {
|
||||
// If project already exists, that's fine - continue with the existing registration
|
||||
if (error.message && error.message.includes('Project already configured')) {
|
||||
console.log('📦 Using existing project registration for:', finalProjectPath);
|
||||
project = { path: finalProjectPath };
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
finalProjectPath = normalizeProjectPath(finalProjectPath);
|
||||
|
||||
// Register project path in DB (or reuse existing active registration)
|
||||
const registrationResult = projectsDb.createProjectPath(finalProjectPath, null);
|
||||
if (registrationResult.outcome === 'active_conflict') {
|
||||
console.log('Project registration already exists for:', finalProjectPath);
|
||||
} else {
|
||||
console.log('Project registered:', registrationResult.project);
|
||||
}
|
||||
|
||||
// Set up writer based on streaming mode
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
import express from 'express';
|
||||
import bcrypt from 'bcrypt';
|
||||
import { userDb, db } from '../database/db.js';
|
||||
import { userDb } from '../modules/database/index.js';
|
||||
import { getConnection } from '../modules/database/connection.js';
|
||||
import { generateToken, authenticateToken } from '../middleware/auth.js';
|
||||
|
||||
const router = express.Router();
|
||||
const db = getConnection();
|
||||
|
||||
// Check auth status and setup requirements
|
||||
router.get('/status', async (req, res) => {
|
||||
@@ -132,4 +134,4 @@ router.post('/logout', authenticateToken, (req, res) => {
|
||||
res.json({ success: true, message: 'Logged out successfully' });
|
||||
});
|
||||
|
||||
export default router;
|
||||
export default router;
|
||||
|
||||
@@ -1,19 +0,0 @@
|
||||
import express from 'express';
|
||||
import { deleteCodexSession } from '../projects.js';
|
||||
import { sessionNamesDb } from '../database/db.js';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
router.delete('/sessions/:sessionId', async (req, res) => {
|
||||
try {
|
||||
const { sessionId } = req.params;
|
||||
await deleteCodexSession(sessionId);
|
||||
sessionNamesDb.deleteName(sessionId, 'codex');
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
console.error(`Error deleting Codex session ${req.params.sessionId}:`, error);
|
||||
res.status(500).json({ success: false, error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
export default router;
|
||||
@@ -320,7 +320,7 @@ Custom commands can be created in:
|
||||
packageName,
|
||||
uptime: uptimeFormatted,
|
||||
uptimeSeconds: Math.floor(uptime),
|
||||
model: context?.model || 'claude-sonnet-4.5',
|
||||
model: context?.model || CLAUDE_MODELS.DEFAULT,
|
||||
provider: context?.provider || 'claude',
|
||||
nodeVersion: process.version,
|
||||
platform: process.platform
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import express from 'express';
|
||||
|
||||
import sessionManager from '../sessionManager.js';
|
||||
import { sessionNamesDb } from '../database/db.js';
|
||||
import { sessionsDb } from '../modules/database/index.js';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
@@ -13,7 +14,7 @@ router.delete('/sessions/:sessionId', async (req, res) => {
|
||||
}
|
||||
|
||||
await sessionManager.deleteSession(sessionId);
|
||||
sessionNamesDb.deleteName(sessionId, 'gemini');
|
||||
sessionsDb.deleteSessionById(sessionId);
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
console.error(`Error deleting Gemini session ${req.params.sessionId}:`, error);
|
||||
|
||||
@@ -2,7 +2,7 @@ import express from 'express';
|
||||
import { spawn } from 'child_process';
|
||||
import path from 'path';
|
||||
import { promises as fs } from 'fs';
|
||||
import { extractProjectDirectory } from '../projects.js';
|
||||
import { projectsDb } from '../modules/database/index.js';
|
||||
import { queryClaudeSDK } from '../claude-sdk.js';
|
||||
import { spawnCursor } from '../cursor-cli.js';
|
||||
|
||||
@@ -101,14 +101,19 @@ function validateProjectPath(projectPath) {
|
||||
return resolved;
|
||||
}
|
||||
|
||||
// Helper function to get the actual project path from the encoded project name
|
||||
async function getActualProjectPath(projectName) {
|
||||
let projectPath;
|
||||
try {
|
||||
projectPath = await extractProjectDirectory(projectName);
|
||||
} catch (error) {
|
||||
console.error(`Error extracting project directory for ${projectName}:`, error);
|
||||
throw new Error(`Unable to resolve project path for "${projectName}"`);
|
||||
/**
|
||||
* Resolve the absolute project directory for a given DB `projectId`.
|
||||
*
|
||||
* After the projectName → projectId migration, every git endpoint receives
|
||||
* the DB primary key (`project` query/body param). The legacy filesystem
|
||||
* resolver that walked Claude's JSONL history is no longer used here; the
|
||||
* path comes straight from the `projects` table and is then sanity-checked
|
||||
* by `validateProjectPath` before any `git` command runs against it.
|
||||
*/
|
||||
async function getActualProjectPath(projectId) {
|
||||
const projectPath = await projectsDb.getProjectPathById(projectId);
|
||||
if (!projectPath) {
|
||||
throw new Error(`Unable to resolve project path for "${projectId}"`);
|
||||
}
|
||||
return validateProjectPath(projectPath);
|
||||
}
|
||||
@@ -292,7 +297,7 @@ router.get('/status', async (req, res) => {
|
||||
const { project } = req.query;
|
||||
|
||||
if (!project) {
|
||||
return res.status(400).json({ error: 'Project name is required' });
|
||||
return res.status(400).json({ error: 'Project id is required' });
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -355,7 +360,7 @@ router.get('/diff', async (req, res) => {
|
||||
const { project, file } = req.query;
|
||||
|
||||
if (!project || !file) {
|
||||
return res.status(400).json({ error: 'Project name and file path are required' });
|
||||
return res.status(400).json({ error: 'Project id and file path are required' });
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -438,7 +443,7 @@ router.get('/file-with-diff', async (req, res) => {
|
||||
const { project, file } = req.query;
|
||||
|
||||
if (!project || !file) {
|
||||
return res.status(400).json({ error: 'Project name and file path are required' });
|
||||
return res.status(400).json({ error: 'Project id and file path are required' });
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -518,7 +523,7 @@ router.post('/initial-commit', async (req, res) => {
|
||||
const { project } = req.body;
|
||||
|
||||
if (!project) {
|
||||
return res.status(400).json({ error: 'Project name is required' });
|
||||
return res.status(400).json({ error: 'Project id is required' });
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -593,7 +598,7 @@ router.post('/revert-local-commit', async (req, res) => {
|
||||
const { project } = req.body;
|
||||
|
||||
if (!project) {
|
||||
return res.status(400).json({ error: 'Project name is required' });
|
||||
return res.status(400).json({ error: 'Project id is required' });
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -640,7 +645,7 @@ router.get('/branches', async (req, res) => {
|
||||
const { project } = req.query;
|
||||
|
||||
if (!project) {
|
||||
return res.status(400).json({ error: 'Project name is required' });
|
||||
return res.status(400).json({ error: 'Project id is required' });
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -684,7 +689,7 @@ router.post('/checkout', async (req, res) => {
|
||||
const { project, branch } = req.body;
|
||||
|
||||
if (!project || !branch) {
|
||||
return res.status(400).json({ error: 'Project name and branch are required' });
|
||||
return res.status(400).json({ error: 'Project id and branch are required' });
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -706,7 +711,7 @@ router.post('/create-branch', async (req, res) => {
|
||||
const { project, branch } = req.body;
|
||||
|
||||
if (!project || !branch) {
|
||||
return res.status(400).json({ error: 'Project name and branch name are required' });
|
||||
return res.status(400).json({ error: 'Project id and branch name are required' });
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -728,7 +733,7 @@ router.post('/delete-branch', async (req, res) => {
|
||||
const { project, branch } = req.body;
|
||||
|
||||
if (!project || !branch) {
|
||||
return res.status(400).json({ error: 'Project name and branch name are required' });
|
||||
return res.status(400).json({ error: 'Project id and branch name are required' });
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -754,7 +759,7 @@ router.get('/commits', async (req, res) => {
|
||||
const { project, limit = 10 } = req.query;
|
||||
|
||||
if (!project) {
|
||||
return res.status(400).json({ error: 'Project name is required' });
|
||||
return res.status(400).json({ error: 'Project id is required' });
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -811,7 +816,7 @@ router.get('/commit-diff', async (req, res) => {
|
||||
const { project, commit } = req.query;
|
||||
|
||||
if (!project || !commit) {
|
||||
return res.status(400).json({ error: 'Project name and commit hash are required' });
|
||||
return res.status(400).json({ error: 'Project id and commit hash are required' });
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -843,7 +848,7 @@ router.post('/generate-commit-message', async (req, res) => {
|
||||
const { project, files, provider = 'claude' } = req.body;
|
||||
|
||||
if (!project || !files || files.length === 0) {
|
||||
return res.status(400).json({ error: 'Project name and files are required' });
|
||||
return res.status(400).json({ error: 'Project id and files are required' });
|
||||
}
|
||||
|
||||
// Validate provider
|
||||
@@ -1048,7 +1053,7 @@ router.get('/remote-status', async (req, res) => {
|
||||
const { project } = req.query;
|
||||
|
||||
if (!project) {
|
||||
return res.status(400).json({ error: 'Project name is required' });
|
||||
return res.status(400).json({ error: 'Project id is required' });
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -1126,7 +1131,7 @@ router.post('/fetch', async (req, res) => {
|
||||
const { project } = req.body;
|
||||
|
||||
if (!project) {
|
||||
return res.status(400).json({ error: 'Project name is required' });
|
||||
return res.status(400).json({ error: 'Project id is required' });
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -1167,7 +1172,7 @@ router.post('/pull', async (req, res) => {
|
||||
const { project } = req.body;
|
||||
|
||||
if (!project) {
|
||||
return res.status(400).json({ error: 'Project name is required' });
|
||||
return res.status(400).json({ error: 'Project id is required' });
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -1235,7 +1240,7 @@ router.post('/push', async (req, res) => {
|
||||
const { project } = req.body;
|
||||
|
||||
if (!project) {
|
||||
return res.status(400).json({ error: 'Project name is required' });
|
||||
return res.status(400).json({ error: 'Project id is required' });
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -1306,7 +1311,7 @@ router.post('/publish', async (req, res) => {
|
||||
const { project, branch } = req.body;
|
||||
|
||||
if (!project || !branch) {
|
||||
return res.status(400).json({ error: 'Project name and branch are required' });
|
||||
return res.status(400).json({ error: 'Project id and branch are required' });
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -1385,7 +1390,7 @@ router.post('/discard', async (req, res) => {
|
||||
const { project, file } = req.body;
|
||||
|
||||
if (!project || !file) {
|
||||
return res.status(400).json({ error: 'Project name and file path are required' });
|
||||
return res.status(400).json({ error: 'Project id and file path are required' });
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -1439,7 +1444,7 @@ router.post('/delete-untracked', async (req, res) => {
|
||||
const { project, file } = req.body;
|
||||
|
||||
if (!project || !file) {
|
||||
return res.status(400).json({ error: 'Project name and file path are required' });
|
||||
return res.status(400).json({ error: 'Project id and file path are required' });
|
||||
}
|
||||
|
||||
try {
|
||||
|
||||
@@ -1,61 +0,0 @@
|
||||
/**
|
||||
* Unified messages endpoint.
|
||||
*
|
||||
* GET /api/sessions/:sessionId/messages?provider=claude&projectName=foo&limit=50&offset=0
|
||||
*
|
||||
* Replaces the four provider-specific session message endpoints with a single route
|
||||
* that delegates to the appropriate adapter via the provider registry.
|
||||
*
|
||||
* @module routes/messages
|
||||
*/
|
||||
|
||||
import express from 'express';
|
||||
import { sessionsService } from '../modules/providers/services/sessions.service.js';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
/**
|
||||
* GET /api/sessions/:sessionId/messages
|
||||
*
|
||||
* Auth: authenticateToken applied at mount level in index.js
|
||||
*
|
||||
* Query params:
|
||||
* provider - 'claude' | 'cursor' | 'codex' | 'gemini' (default: 'claude')
|
||||
* projectName - required for claude provider
|
||||
* projectPath - required for cursor provider (absolute path used for cwdId hash)
|
||||
* limit - page size (omit or null for all)
|
||||
* offset - pagination offset (default: 0)
|
||||
*/
|
||||
router.get('/:sessionId/messages', async (req, res) => {
|
||||
try {
|
||||
const { sessionId } = req.params;
|
||||
const provider = String(req.query.provider || 'claude').trim().toLowerCase();
|
||||
const projectName = req.query.projectName || '';
|
||||
const projectPath = req.query.projectPath || '';
|
||||
const limitParam = req.query.limit;
|
||||
const limit = limitParam !== undefined && limitParam !== null && limitParam !== ''
|
||||
? parseInt(limitParam, 10)
|
||||
: null;
|
||||
const offset = parseInt(req.query.offset || '0', 10);
|
||||
|
||||
const availableProviders = sessionsService.listProviderIds();
|
||||
if (!availableProviders.includes(provider)) {
|
||||
const available = availableProviders.join(', ');
|
||||
return res.status(400).json({ error: `Unknown provider: ${provider}. Available: ${available}` });
|
||||
}
|
||||
|
||||
const result = await sessionsService.fetchHistory(provider, sessionId, {
|
||||
projectName,
|
||||
projectPath,
|
||||
limit,
|
||||
offset,
|
||||
});
|
||||
|
||||
return res.json(result);
|
||||
} catch (error) {
|
||||
console.error('Error fetching unified messages:', error);
|
||||
return res.status(500).json({ error: 'Failed to fetch messages' });
|
||||
}
|
||||
});
|
||||
|
||||
export default router;
|
||||
@@ -1,548 +0,0 @@
|
||||
import express from 'express';
|
||||
import { promises as fs } from 'fs';
|
||||
import path from 'path';
|
||||
import { spawn } from 'child_process';
|
||||
import os from 'os';
|
||||
import { addProjectManually } from '../projects.js';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
function sanitizeGitError(message, token) {
|
||||
if (!message || !token) return message;
|
||||
return message.replace(new RegExp(token.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'), 'g'), '***');
|
||||
}
|
||||
|
||||
// Configure allowed workspace root (defaults to user's home directory)
|
||||
export const WORKSPACES_ROOT = process.env.WORKSPACES_ROOT || os.homedir();
|
||||
|
||||
// System-critical paths that should never be used as workspace directories
|
||||
export const FORBIDDEN_PATHS = [
|
||||
// Unix
|
||||
'/',
|
||||
'/etc',
|
||||
'/bin',
|
||||
'/sbin',
|
||||
'/usr',
|
||||
'/dev',
|
||||
'/proc',
|
||||
'/sys',
|
||||
'/var',
|
||||
'/boot',
|
||||
'/root',
|
||||
'/lib',
|
||||
'/lib64',
|
||||
'/opt',
|
||||
'/tmp',
|
||||
'/run',
|
||||
// Windows
|
||||
'C:\\Windows',
|
||||
'C:\\Program Files',
|
||||
'C:\\Program Files (x86)',
|
||||
'C:\\ProgramData',
|
||||
'C:\\System Volume Information',
|
||||
'C:\\$Recycle.Bin'
|
||||
];
|
||||
|
||||
/**
|
||||
* Validates that a path is safe for workspace operations
|
||||
* @param {string} requestedPath - The path to validate
|
||||
* @returns {Promise<{valid: boolean, resolvedPath?: string, error?: string}>}
|
||||
*/
|
||||
export async function validateWorkspacePath(requestedPath) {
|
||||
try {
|
||||
// Resolve to absolute path
|
||||
let absolutePath = path.resolve(requestedPath);
|
||||
|
||||
// Check if path is a forbidden system directory
|
||||
const normalizedPath = path.normalize(absolutePath);
|
||||
if (FORBIDDEN_PATHS.includes(normalizedPath) || normalizedPath === '/') {
|
||||
return {
|
||||
valid: false,
|
||||
error: 'Cannot use system-critical directories as workspace locations'
|
||||
};
|
||||
}
|
||||
|
||||
// Additional check for paths starting with forbidden directories
|
||||
for (const forbidden of FORBIDDEN_PATHS) {
|
||||
if (normalizedPath === forbidden ||
|
||||
normalizedPath.startsWith(forbidden + path.sep)) {
|
||||
// Exception: /var/tmp and similar user-accessible paths might be allowed
|
||||
// but /var itself and most /var subdirectories should be blocked
|
||||
if (forbidden === '/var' &&
|
||||
(normalizedPath.startsWith('/var/tmp') ||
|
||||
normalizedPath.startsWith('/var/folders'))) {
|
||||
continue; // Allow these specific cases
|
||||
}
|
||||
|
||||
return {
|
||||
valid: false,
|
||||
error: `Cannot create workspace in system directory: ${forbidden}`
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// Try to resolve the real path (following symlinks)
|
||||
let realPath;
|
||||
try {
|
||||
// Check if path exists to resolve real path
|
||||
await fs.access(absolutePath);
|
||||
realPath = await fs.realpath(absolutePath);
|
||||
} catch (error) {
|
||||
if (error.code === 'ENOENT') {
|
||||
// Path doesn't exist yet - check parent directory
|
||||
let parentPath = path.dirname(absolutePath);
|
||||
try {
|
||||
const parentRealPath = await fs.realpath(parentPath);
|
||||
|
||||
// Reconstruct the full path with real parent
|
||||
realPath = path.join(parentRealPath, path.basename(absolutePath));
|
||||
} catch (parentError) {
|
||||
if (parentError.code === 'ENOENT') {
|
||||
// Parent doesn't exist either - use the absolute path as-is
|
||||
// We'll validate it's within allowed root
|
||||
realPath = absolutePath;
|
||||
} else {
|
||||
throw parentError;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
// Resolve the workspace root to its real path
|
||||
const resolvedWorkspaceRoot = await fs.realpath(WORKSPACES_ROOT);
|
||||
|
||||
// Ensure the resolved path is contained within the allowed workspace root
|
||||
if (!realPath.startsWith(resolvedWorkspaceRoot + path.sep) &&
|
||||
realPath !== resolvedWorkspaceRoot) {
|
||||
return {
|
||||
valid: false,
|
||||
error: `Workspace path must be within the allowed workspace root: ${WORKSPACES_ROOT}`
|
||||
};
|
||||
}
|
||||
|
||||
// Additional symlink check for existing paths
|
||||
try {
|
||||
await fs.access(absolutePath);
|
||||
const stats = await fs.lstat(absolutePath);
|
||||
|
||||
if (stats.isSymbolicLink()) {
|
||||
// Verify symlink target is also within allowed root
|
||||
const linkTarget = await fs.readlink(absolutePath);
|
||||
const resolvedTarget = path.resolve(path.dirname(absolutePath), linkTarget);
|
||||
const realTarget = await fs.realpath(resolvedTarget);
|
||||
|
||||
if (!realTarget.startsWith(resolvedWorkspaceRoot + path.sep) &&
|
||||
realTarget !== resolvedWorkspaceRoot) {
|
||||
return {
|
||||
valid: false,
|
||||
error: 'Symlink target is outside the allowed workspace root'
|
||||
};
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
if (error.code !== 'ENOENT') {
|
||||
throw error;
|
||||
}
|
||||
// Path doesn't exist - that's fine for new workspace creation
|
||||
}
|
||||
|
||||
return {
|
||||
valid: true,
|
||||
resolvedPath: realPath
|
||||
};
|
||||
|
||||
} catch (error) {
|
||||
return {
|
||||
valid: false,
|
||||
error: `Path validation failed: ${error.message}`
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new workspace
|
||||
* POST /api/projects/create-workspace
|
||||
*
|
||||
* Body:
|
||||
* - workspaceType: 'existing' | 'new'
|
||||
* - path: string (workspace path)
|
||||
* - githubUrl?: string (optional, for new workspaces)
|
||||
* - githubTokenId?: number (optional, ID of stored token)
|
||||
* - newGithubToken?: string (optional, one-time token)
|
||||
*/
|
||||
router.post('/create-workspace', async (req, res) => {
|
||||
try {
|
||||
const { workspaceType, path: workspacePath, githubUrl, githubTokenId, newGithubToken } = req.body;
|
||||
|
||||
// Validate required fields
|
||||
if (!workspaceType || !workspacePath) {
|
||||
return res.status(400).json({ error: 'workspaceType and path are required' });
|
||||
}
|
||||
|
||||
if (!['existing', 'new'].includes(workspaceType)) {
|
||||
return res.status(400).json({ error: 'workspaceType must be "existing" or "new"' });
|
||||
}
|
||||
|
||||
// Validate path safety before any operations
|
||||
const validation = await validateWorkspacePath(workspacePath);
|
||||
if (!validation.valid) {
|
||||
return res.status(400).json({
|
||||
error: 'Invalid workspace path',
|
||||
details: validation.error
|
||||
});
|
||||
}
|
||||
|
||||
const absolutePath = validation.resolvedPath;
|
||||
|
||||
// Handle existing workspace
|
||||
if (workspaceType === 'existing') {
|
||||
// Check if the path exists
|
||||
try {
|
||||
await fs.access(absolutePath);
|
||||
const stats = await fs.stat(absolutePath);
|
||||
|
||||
if (!stats.isDirectory()) {
|
||||
return res.status(400).json({ error: 'Path exists but is not a directory' });
|
||||
}
|
||||
} catch (error) {
|
||||
if (error.code === 'ENOENT') {
|
||||
return res.status(404).json({ error: 'Workspace path does not exist' });
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
|
||||
// Add the existing workspace to the project list
|
||||
const project = await addProjectManually(absolutePath);
|
||||
|
||||
return res.json({
|
||||
success: true,
|
||||
project,
|
||||
message: 'Existing workspace added successfully'
|
||||
});
|
||||
}
|
||||
|
||||
// Handle new workspace creation
|
||||
if (workspaceType === 'new') {
|
||||
// Create the directory if it doesn't exist
|
||||
await fs.mkdir(absolutePath, { recursive: true });
|
||||
|
||||
// If GitHub URL is provided, clone the repository
|
||||
if (githubUrl) {
|
||||
let githubToken = null;
|
||||
|
||||
// Get GitHub token if needed
|
||||
if (githubTokenId) {
|
||||
// Fetch token from database
|
||||
const token = await getGithubTokenById(githubTokenId, req.user.id);
|
||||
if (!token) {
|
||||
// Clean up created directory
|
||||
await fs.rm(absolutePath, { recursive: true, force: true });
|
||||
return res.status(404).json({ error: 'GitHub token not found' });
|
||||
}
|
||||
githubToken = token.github_token;
|
||||
} else if (newGithubToken) {
|
||||
githubToken = newGithubToken;
|
||||
}
|
||||
|
||||
// Extract repo name from URL for the clone destination
|
||||
const normalizedUrl = githubUrl.replace(/\/+$/, '').replace(/\.git$/, '');
|
||||
const repoName = normalizedUrl.split('/').pop() || 'repository';
|
||||
const clonePath = path.join(absolutePath, repoName);
|
||||
|
||||
// Check if clone destination already exists to prevent data loss
|
||||
try {
|
||||
await fs.access(clonePath);
|
||||
return res.status(409).json({
|
||||
error: 'Directory already exists',
|
||||
details: `The destination path "${clonePath}" already exists. Please choose a different location or remove the existing directory.`
|
||||
});
|
||||
} catch (err) {
|
||||
// Directory doesn't exist, which is what we want
|
||||
}
|
||||
|
||||
// Clone the repository into a subfolder
|
||||
try {
|
||||
await cloneGitHubRepository(githubUrl, clonePath, githubToken);
|
||||
} catch (error) {
|
||||
// Only clean up if clone created partial data (check if dir exists and is empty or partial)
|
||||
try {
|
||||
const stats = await fs.stat(clonePath);
|
||||
if (stats.isDirectory()) {
|
||||
await fs.rm(clonePath, { recursive: true, force: true });
|
||||
}
|
||||
} catch (cleanupError) {
|
||||
// Directory doesn't exist or cleanup failed - ignore
|
||||
}
|
||||
throw new Error(`Failed to clone repository: ${error.message}`);
|
||||
}
|
||||
|
||||
// Add the cloned repo path to the project list
|
||||
const project = await addProjectManually(clonePath);
|
||||
|
||||
return res.json({
|
||||
success: true,
|
||||
project,
|
||||
message: 'New workspace created and repository cloned successfully'
|
||||
});
|
||||
}
|
||||
|
||||
// Add the new workspace to the project list (no clone)
|
||||
const project = await addProjectManually(absolutePath);
|
||||
|
||||
return res.json({
|
||||
success: true,
|
||||
project,
|
||||
message: 'New workspace created successfully'
|
||||
});
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
console.error('Error creating workspace:', error);
|
||||
res.status(500).json({
|
||||
error: error.message || 'Failed to create workspace',
|
||||
details: process.env.NODE_ENV === 'development' ? error.stack : undefined
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Helper function to get GitHub token from database
|
||||
*/
|
||||
async function getGithubTokenById(tokenId, userId) {
|
||||
const { db } = await import('../database/db.js');
|
||||
|
||||
const credential = db.prepare(
|
||||
'SELECT * FROM user_credentials WHERE id = ? AND user_id = ? AND credential_type = ? AND is_active = 1'
|
||||
).get(tokenId, userId, 'github_token');
|
||||
|
||||
// Return in the expected format (github_token field for compatibility)
|
||||
if (credential) {
|
||||
return {
|
||||
...credential,
|
||||
github_token: credential.credential_value
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Clone repository with progress streaming (SSE)
|
||||
* GET /api/projects/clone-progress
|
||||
*/
|
||||
router.get('/clone-progress', async (req, res) => {
|
||||
const { path: workspacePath, githubUrl, githubTokenId, newGithubToken } = req.query;
|
||||
|
||||
res.setHeader('Content-Type', 'text/event-stream');
|
||||
res.setHeader('Cache-Control', 'no-cache');
|
||||
res.setHeader('Connection', 'keep-alive');
|
||||
res.flushHeaders();
|
||||
|
||||
const sendEvent = (type, data) => {
|
||||
res.write(`data: ${JSON.stringify({ type, ...data })}\n\n`);
|
||||
};
|
||||
|
||||
try {
|
||||
if (!workspacePath || !githubUrl) {
|
||||
sendEvent('error', { message: 'workspacePath and githubUrl are required' });
|
||||
res.end();
|
||||
return;
|
||||
}
|
||||
|
||||
const validation = await validateWorkspacePath(workspacePath);
|
||||
if (!validation.valid) {
|
||||
sendEvent('error', { message: validation.error });
|
||||
res.end();
|
||||
return;
|
||||
}
|
||||
|
||||
const absolutePath = validation.resolvedPath;
|
||||
|
||||
await fs.mkdir(absolutePath, { recursive: true });
|
||||
|
||||
let githubToken = null;
|
||||
if (githubTokenId) {
|
||||
const token = await getGithubTokenById(parseInt(githubTokenId), req.user.id);
|
||||
if (!token) {
|
||||
await fs.rm(absolutePath, { recursive: true, force: true });
|
||||
sendEvent('error', { message: 'GitHub token not found' });
|
||||
res.end();
|
||||
return;
|
||||
}
|
||||
githubToken = token.github_token;
|
||||
} else if (newGithubToken) {
|
||||
githubToken = newGithubToken;
|
||||
}
|
||||
|
||||
const normalizedUrl = githubUrl.replace(/\/+$/, '').replace(/\.git$/, '');
|
||||
const repoName = normalizedUrl.split('/').pop() || 'repository';
|
||||
const clonePath = path.join(absolutePath, repoName);
|
||||
|
||||
// Check if clone destination already exists to prevent data loss
|
||||
try {
|
||||
await fs.access(clonePath);
|
||||
sendEvent('error', { message: `Directory "${repoName}" already exists. Please choose a different location or remove the existing directory.` });
|
||||
res.end();
|
||||
return;
|
||||
} catch (err) {
|
||||
// Directory doesn't exist, which is what we want
|
||||
}
|
||||
|
||||
let cloneUrl = githubUrl;
|
||||
if (githubToken) {
|
||||
try {
|
||||
const url = new URL(githubUrl);
|
||||
url.username = githubToken;
|
||||
url.password = '';
|
||||
cloneUrl = url.toString();
|
||||
} catch (error) {
|
||||
// SSH URL or invalid - use as-is
|
||||
}
|
||||
}
|
||||
|
||||
sendEvent('progress', { message: `Cloning into '${repoName}'...` });
|
||||
|
||||
const gitProcess = spawn('git', ['clone', '--progress', cloneUrl, clonePath], {
|
||||
stdio: ['ignore', 'pipe', 'pipe'],
|
||||
env: {
|
||||
...process.env,
|
||||
GIT_TERMINAL_PROMPT: '0'
|
||||
}
|
||||
});
|
||||
|
||||
let lastError = '';
|
||||
|
||||
gitProcess.stdout.on('data', (data) => {
|
||||
const message = data.toString().trim();
|
||||
if (message) {
|
||||
sendEvent('progress', { message });
|
||||
}
|
||||
});
|
||||
|
||||
gitProcess.stderr.on('data', (data) => {
|
||||
const message = data.toString().trim();
|
||||
lastError = message;
|
||||
if (message) {
|
||||
sendEvent('progress', { message });
|
||||
}
|
||||
});
|
||||
|
||||
gitProcess.on('close', async (code) => {
|
||||
if (code === 0) {
|
||||
try {
|
||||
const project = await addProjectManually(clonePath);
|
||||
sendEvent('complete', { project, message: 'Repository cloned successfully' });
|
||||
} catch (error) {
|
||||
sendEvent('error', { message: `Clone succeeded but failed to add project: ${error.message}` });
|
||||
}
|
||||
} else {
|
||||
const sanitizedError = sanitizeGitError(lastError, githubToken);
|
||||
let errorMessage = 'Git clone failed';
|
||||
if (lastError.includes('Authentication failed') || lastError.includes('could not read Username')) {
|
||||
errorMessage = 'Authentication failed. Please check your credentials.';
|
||||
} else if (lastError.includes('Repository not found')) {
|
||||
errorMessage = 'Repository not found. Please check the URL and ensure you have access.';
|
||||
} else if (lastError.includes('already exists')) {
|
||||
errorMessage = 'Directory already exists';
|
||||
} else if (sanitizedError) {
|
||||
errorMessage = sanitizedError;
|
||||
}
|
||||
try {
|
||||
await fs.rm(clonePath, { recursive: true, force: true });
|
||||
} catch (cleanupError) {
|
||||
console.error('Failed to clean up after clone failure:', sanitizeGitError(cleanupError.message, githubToken));
|
||||
}
|
||||
sendEvent('error', { message: errorMessage });
|
||||
}
|
||||
res.end();
|
||||
});
|
||||
|
||||
gitProcess.on('error', (error) => {
|
||||
if (error.code === 'ENOENT') {
|
||||
sendEvent('error', { message: 'Git is not installed or not in PATH' });
|
||||
} else {
|
||||
sendEvent('error', { message: error.message });
|
||||
}
|
||||
res.end();
|
||||
});
|
||||
|
||||
req.on('close', () => {
|
||||
gitProcess.kill();
|
||||
});
|
||||
|
||||
} catch (error) {
|
||||
sendEvent('error', { message: error.message });
|
||||
res.end();
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Helper function to clone a GitHub repository
|
||||
*/
|
||||
function cloneGitHubRepository(githubUrl, destinationPath, githubToken = null) {
|
||||
return new Promise((resolve, reject) => {
|
||||
let cloneUrl = githubUrl;
|
||||
|
||||
if (githubToken) {
|
||||
try {
|
||||
const url = new URL(githubUrl);
|
||||
url.username = githubToken;
|
||||
url.password = '';
|
||||
cloneUrl = url.toString();
|
||||
} catch (error) {
|
||||
// SSH URL - use as-is
|
||||
}
|
||||
}
|
||||
|
||||
const gitProcess = spawn('git', ['clone', '--progress', cloneUrl, destinationPath], {
|
||||
stdio: ['ignore', 'pipe', 'pipe'],
|
||||
env: {
|
||||
...process.env,
|
||||
GIT_TERMINAL_PROMPT: '0'
|
||||
}
|
||||
});
|
||||
|
||||
let stdout = '';
|
||||
let stderr = '';
|
||||
|
||||
gitProcess.stdout.on('data', (data) => {
|
||||
stdout += data.toString();
|
||||
});
|
||||
|
||||
gitProcess.stderr.on('data', (data) => {
|
||||
stderr += data.toString();
|
||||
});
|
||||
|
||||
gitProcess.on('close', (code) => {
|
||||
if (code === 0) {
|
||||
resolve({ stdout, stderr });
|
||||
} else {
|
||||
let errorMessage = 'Git clone failed';
|
||||
|
||||
if (stderr.includes('Authentication failed') || stderr.includes('could not read Username')) {
|
||||
errorMessage = 'Authentication failed. Please check your GitHub token.';
|
||||
} else if (stderr.includes('Repository not found')) {
|
||||
errorMessage = 'Repository not found. Please check the URL and ensure you have access.';
|
||||
} else if (stderr.includes('already exists')) {
|
||||
errorMessage = 'Directory already exists';
|
||||
} else if (stderr) {
|
||||
errorMessage = stderr;
|
||||
}
|
||||
|
||||
reject(new Error(errorMessage));
|
||||
}
|
||||
});
|
||||
|
||||
gitProcess.on('error', (error) => {
|
||||
if (error.code === 'ENOENT') {
|
||||
reject(new Error('Git is not installed or not in PATH'));
|
||||
} else {
|
||||
reject(error);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export default router;
|
||||
@@ -1,5 +1,5 @@
|
||||
import express from 'express';
|
||||
import { apiKeysDb, credentialsDb, notificationPreferencesDb, pushSubscriptionsDb } from '../database/db.js';
|
||||
import { apiKeysDb, credentialsDb, notificationPreferencesDb, pushSubscriptionsDb } from '../modules/database/index.js';
|
||||
import { getPublicKey } from '../services/vapid-keys.js';
|
||||
import { createNotificationEvent, notifyUserIfEnabled } from '../services/notification-orchestrator.js';
|
||||
|
||||
|
||||
@@ -13,10 +13,25 @@ import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { promises as fsPromises } from 'fs';
|
||||
import { spawn } from 'child_process';
|
||||
import { extractProjectDirectory } from '../projects.js';
|
||||
import { projectsDb } from '../modules/database/index.js';
|
||||
import { detectTaskMasterMCPServer } from '../utils/mcp-detector.js';
|
||||
import { broadcastTaskMasterProjectUpdate, broadcastTaskMasterTasksUpdate } from '../utils/taskmaster-websocket.js';
|
||||
|
||||
/**
|
||||
* Resolve the absolute project directory from a DB-assigned `projectId`.
|
||||
*
|
||||
* TaskMaster routes used to accept a Claude-encoded folder name (`projectName`)
|
||||
* and derive the path from JSONL history. After the projectId migration the
|
||||
* only identifier we accept is the primary key of the `projects` table, so
|
||||
* every handler calls this helper and 404s when the id is unknown.
|
||||
*/
|
||||
async function resolveProjectPathFromId(projectId) {
|
||||
if (!projectId) {
|
||||
return null;
|
||||
}
|
||||
return projectsDb.getProjectPathById(projectId);
|
||||
}
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
/**
|
||||
@@ -132,21 +147,22 @@ router.get('/installation-status', async (req, res) => {
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/taskmaster/tasks/:projectName
|
||||
* GET /api/taskmaster/tasks/:projectId
|
||||
* Load actual tasks from .taskmaster/tasks/tasks.json
|
||||
*
|
||||
* `projectId` is the DB primary key of the project; the folder is resolved via
|
||||
* the projects table rather than extracted from Claude JSONL history.
|
||||
*/
|
||||
router.get('/tasks/:projectName', async (req, res) => {
|
||||
router.get('/tasks/:projectId', async (req, res) => {
|
||||
try {
|
||||
const { projectName } = req.params;
|
||||
|
||||
// Get project path
|
||||
let projectPath;
|
||||
try {
|
||||
projectPath = await extractProjectDirectory(projectName);
|
||||
} catch (error) {
|
||||
const { projectId } = req.params;
|
||||
|
||||
// Get project path via the DB; the legacy JSONL-based resolver is gone.
|
||||
const projectPath = await resolveProjectPathFromId(projectId);
|
||||
if (!projectPath) {
|
||||
return res.status(404).json({
|
||||
error: 'Project not found',
|
||||
message: `Project "${projectName}" does not exist`
|
||||
message: `Project "${projectId}" does not exist`
|
||||
});
|
||||
}
|
||||
|
||||
@@ -158,7 +174,7 @@ router.get('/tasks/:projectName', async (req, res) => {
|
||||
await fsPromises.access(tasksFilePath);
|
||||
} catch (error) {
|
||||
return res.json({
|
||||
projectName,
|
||||
projectId,
|
||||
tasks: [],
|
||||
message: 'No tasks.json file found'
|
||||
});
|
||||
@@ -213,7 +229,7 @@ router.get('/tasks/:projectName', async (req, res) => {
|
||||
}));
|
||||
|
||||
res.json({
|
||||
projectName,
|
||||
projectId,
|
||||
projectPath,
|
||||
tasks: transformedTasks,
|
||||
currentTag,
|
||||
@@ -247,21 +263,19 @@ router.get('/tasks/:projectName', async (req, res) => {
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/taskmaster/prd/:projectName
|
||||
* GET /api/taskmaster/prd/:projectId
|
||||
* List all PRD files in the project's .taskmaster/docs directory
|
||||
*/
|
||||
router.get('/prd/:projectName', async (req, res) => {
|
||||
router.get('/prd/:projectId', async (req, res) => {
|
||||
try {
|
||||
const { projectName } = req.params;
|
||||
|
||||
// Get project path
|
||||
let projectPath;
|
||||
try {
|
||||
projectPath = await extractProjectDirectory(projectName);
|
||||
} catch (error) {
|
||||
const { projectId } = req.params;
|
||||
|
||||
// projectId → projectPath lookup through the DB (post-migration).
|
||||
const projectPath = await resolveProjectPathFromId(projectId);
|
||||
if (!projectPath) {
|
||||
return res.status(404).json({
|
||||
error: 'Project not found',
|
||||
message: `Project "${projectName}" does not exist`
|
||||
message: `Project "${projectId}" does not exist`
|
||||
});
|
||||
}
|
||||
|
||||
@@ -272,7 +286,7 @@ router.get('/prd/:projectName', async (req, res) => {
|
||||
await fsPromises.access(docsPath, fs.constants.R_OK);
|
||||
} catch (error) {
|
||||
return res.json({
|
||||
projectName,
|
||||
projectId,
|
||||
prdFiles: [],
|
||||
message: 'No .taskmaster/docs directory found'
|
||||
});
|
||||
@@ -299,7 +313,7 @@ router.get('/prd/:projectName', async (req, res) => {
|
||||
}
|
||||
|
||||
res.json({
|
||||
projectName,
|
||||
projectId,
|
||||
projectPath,
|
||||
prdFiles: prdFiles.sort((a, b) => new Date(b.modified) - new Date(a.modified)),
|
||||
timestamp: new Date().toISOString()
|
||||
@@ -323,12 +337,12 @@ router.get('/prd/:projectName', async (req, res) => {
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/taskmaster/prd/:projectName
|
||||
* POST /api/taskmaster/prd/:projectId
|
||||
* Create or update a PRD file in the project's .taskmaster/docs directory
|
||||
*/
|
||||
router.post('/prd/:projectName', async (req, res) => {
|
||||
router.post('/prd/:projectId', async (req, res) => {
|
||||
try {
|
||||
const { projectName } = req.params;
|
||||
const { projectId } = req.params;
|
||||
const { fileName, content } = req.body;
|
||||
|
||||
if (!fileName || !content) {
|
||||
@@ -346,14 +360,12 @@ router.post('/prd/:projectName', async (req, res) => {
|
||||
});
|
||||
}
|
||||
|
||||
// Get project path
|
||||
let projectPath;
|
||||
try {
|
||||
projectPath = await extractProjectDirectory(projectName);
|
||||
} catch (error) {
|
||||
// Resolve the project folder through the DB using the projectId param.
|
||||
const projectPath = await resolveProjectPathFromId(projectId);
|
||||
if (!projectPath) {
|
||||
return res.status(404).json({
|
||||
error: 'Project not found',
|
||||
message: `Project "${projectName}" does not exist`
|
||||
message: `Project "${projectId}" does not exist`
|
||||
});
|
||||
}
|
||||
|
||||
@@ -379,7 +391,7 @@ router.post('/prd/:projectName', async (req, res) => {
|
||||
const stats = await fsPromises.stat(filePath);
|
||||
|
||||
res.json({
|
||||
projectName,
|
||||
projectId,
|
||||
projectPath,
|
||||
fileName,
|
||||
filePath: path.relative(projectPath, filePath),
|
||||
@@ -408,21 +420,18 @@ router.post('/prd/:projectName', async (req, res) => {
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/taskmaster/prd/:projectName/:fileName
|
||||
* GET /api/taskmaster/prd/:projectId/:fileName
|
||||
* Get content of a specific PRD file
|
||||
*/
|
||||
router.get('/prd/:projectName/:fileName', async (req, res) => {
|
||||
router.get('/prd/:projectId/:fileName', async (req, res) => {
|
||||
try {
|
||||
const { projectName, fileName } = req.params;
|
||||
|
||||
// Get project path
|
||||
let projectPath;
|
||||
try {
|
||||
projectPath = await extractProjectDirectory(projectName);
|
||||
} catch (error) {
|
||||
const { projectId, fileName } = req.params;
|
||||
|
||||
const projectPath = await resolveProjectPathFromId(projectId);
|
||||
if (!projectPath) {
|
||||
return res.status(404).json({
|
||||
error: 'Project not found',
|
||||
message: `Project "${projectName}" does not exist`
|
||||
message: `Project "${projectId}" does not exist`
|
||||
});
|
||||
}
|
||||
|
||||
@@ -444,7 +453,7 @@ router.get('/prd/:projectName/:fileName', async (req, res) => {
|
||||
const stats = await fsPromises.stat(filePath);
|
||||
|
||||
res.json({
|
||||
projectName,
|
||||
projectId,
|
||||
projectPath,
|
||||
fileName,
|
||||
filePath: path.relative(projectPath, filePath),
|
||||
@@ -473,21 +482,18 @@ router.get('/prd/:projectName/:fileName', async (req, res) => {
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/taskmaster/init/:projectName
|
||||
* POST /api/taskmaster/init/:projectId
|
||||
* Initialize TaskMaster in a project
|
||||
*/
|
||||
router.post('/init/:projectName', async (req, res) => {
|
||||
router.post('/init/:projectId', async (req, res) => {
|
||||
try {
|
||||
const { projectName } = req.params;
|
||||
|
||||
// Get project path
|
||||
let projectPath;
|
||||
try {
|
||||
projectPath = await extractProjectDirectory(projectName);
|
||||
} catch (error) {
|
||||
const { projectId } = req.params;
|
||||
|
||||
const projectPath = await resolveProjectPathFromId(projectId);
|
||||
if (!projectPath) {
|
||||
return res.status(404).json({
|
||||
error: 'Project not found',
|
||||
message: `Project "${projectName}" does not exist`
|
||||
message: `Project "${projectId}" does not exist`
|
||||
});
|
||||
}
|
||||
|
||||
@@ -522,17 +528,19 @@ router.post('/init/:projectName', async (req, res) => {
|
||||
|
||||
initProcess.on('close', (code) => {
|
||||
if (code === 0) {
|
||||
// Broadcast TaskMaster project update via WebSocket
|
||||
// Broadcast TaskMaster project update via WebSocket. The
|
||||
// WebSocket payload keeps using `projectId` so the frontend
|
||||
// can match notifications against the current selection.
|
||||
if (req.app.locals.wss) {
|
||||
broadcastTaskMasterProjectUpdate(
|
||||
req.app.locals.wss,
|
||||
projectName,
|
||||
req.app.locals.wss,
|
||||
projectId,
|
||||
{ hasTaskmaster: true, status: 'initialized' }
|
||||
);
|
||||
}
|
||||
|
||||
res.json({
|
||||
projectName,
|
||||
projectId,
|
||||
projectPath,
|
||||
message: 'TaskMaster initialized successfully',
|
||||
output: stdout,
|
||||
@@ -562,12 +570,12 @@ router.post('/init/:projectName', async (req, res) => {
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/taskmaster/add-task/:projectName
|
||||
* POST /api/taskmaster/add-task/:projectId
|
||||
* Add a new task to the project
|
||||
*/
|
||||
router.post('/add-task/:projectName', async (req, res) => {
|
||||
router.post('/add-task/:projectId', async (req, res) => {
|
||||
try {
|
||||
const { projectName } = req.params;
|
||||
const { projectId } = req.params;
|
||||
const { prompt, title, description, priority = 'medium', dependencies } = req.body;
|
||||
|
||||
if (!prompt && (!title || !description)) {
|
||||
@@ -576,15 +584,12 @@ router.post('/add-task/:projectName', async (req, res) => {
|
||||
message: 'Either "prompt" or both "title" and "description" are required'
|
||||
});
|
||||
}
|
||||
|
||||
// Get project path
|
||||
let projectPath;
|
||||
try {
|
||||
projectPath = await extractProjectDirectory(projectName);
|
||||
} catch (error) {
|
||||
|
||||
const projectPath = await resolveProjectPathFromId(projectId);
|
||||
if (!projectPath) {
|
||||
return res.status(404).json({
|
||||
error: 'Project not found',
|
||||
message: `Project "${projectName}" does not exist`
|
||||
message: `Project "${projectId}" does not exist`
|
||||
});
|
||||
}
|
||||
|
||||
@@ -629,16 +634,17 @@ router.post('/add-task/:projectName', async (req, res) => {
|
||||
console.log('Stderr:', stderr);
|
||||
|
||||
if (code === 0) {
|
||||
// Broadcast task update via WebSocket
|
||||
// Broadcast task update via WebSocket using the projectId so
|
||||
// clients subscribed to this project get notified immediately.
|
||||
if (req.app.locals.wss) {
|
||||
broadcastTaskMasterTasksUpdate(
|
||||
req.app.locals.wss,
|
||||
projectName
|
||||
req.app.locals.wss,
|
||||
projectId
|
||||
);
|
||||
}
|
||||
|
||||
res.json({
|
||||
projectName,
|
||||
projectId,
|
||||
projectPath,
|
||||
message: 'Task added successfully',
|
||||
output: stdout,
|
||||
@@ -666,22 +672,19 @@ router.post('/add-task/:projectName', async (req, res) => {
|
||||
});
|
||||
|
||||
/**
|
||||
* PUT /api/taskmaster/update-task/:projectName/:taskId
|
||||
* PUT /api/taskmaster/update-task/:projectId/:taskId
|
||||
* Update a specific task using TaskMaster CLI
|
||||
*/
|
||||
router.put('/update-task/:projectName/:taskId', async (req, res) => {
|
||||
router.put('/update-task/:projectId/:taskId', async (req, res) => {
|
||||
try {
|
||||
const { projectName, taskId } = req.params;
|
||||
const { projectId, taskId } = req.params;
|
||||
const { title, description, status, priority, details } = req.body;
|
||||
|
||||
// Get project path
|
||||
let projectPath;
|
||||
try {
|
||||
projectPath = await extractProjectDirectory(projectName);
|
||||
} catch (error) {
|
||||
|
||||
const projectPath = await resolveProjectPathFromId(projectId);
|
||||
if (!projectPath) {
|
||||
return res.status(404).json({
|
||||
error: 'Project not found',
|
||||
message: `Project "${projectName}" does not exist`
|
||||
message: `Project "${projectId}" does not exist`
|
||||
});
|
||||
}
|
||||
|
||||
@@ -707,11 +710,11 @@ router.put('/update-task/:projectName/:taskId', async (req, res) => {
|
||||
if (code === 0) {
|
||||
// Broadcast task update via WebSocket
|
||||
if (req.app.locals.wss) {
|
||||
broadcastTaskMasterTasksUpdate(req.app.locals.wss, projectName);
|
||||
broadcastTaskMasterTasksUpdate(req.app.locals.wss, projectId);
|
||||
}
|
||||
|
||||
res.json({
|
||||
projectName,
|
||||
projectId,
|
||||
projectPath,
|
||||
taskId,
|
||||
message: 'Task status updated successfully',
|
||||
@@ -759,11 +762,11 @@ router.put('/update-task/:projectName/:taskId', async (req, res) => {
|
||||
if (code === 0) {
|
||||
// Broadcast task update via WebSocket
|
||||
if (req.app.locals.wss) {
|
||||
broadcastTaskMasterTasksUpdate(req.app.locals.wss, projectName);
|
||||
broadcastTaskMasterTasksUpdate(req.app.locals.wss, projectId);
|
||||
}
|
||||
|
||||
res.json({
|
||||
projectName,
|
||||
projectId,
|
||||
projectPath,
|
||||
taskId,
|
||||
message: 'Task updated successfully',
|
||||
@@ -793,22 +796,19 @@ router.put('/update-task/:projectName/:taskId', async (req, res) => {
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/taskmaster/parse-prd/:projectName
|
||||
* POST /api/taskmaster/parse-prd/:projectId
|
||||
* Parse a PRD file to generate tasks
|
||||
*/
|
||||
router.post('/parse-prd/:projectName', async (req, res) => {
|
||||
router.post('/parse-prd/:projectId', async (req, res) => {
|
||||
try {
|
||||
const { projectName } = req.params;
|
||||
const { projectId } = req.params;
|
||||
const { fileName = 'prd.txt', numTasks, append = false } = req.body;
|
||||
|
||||
// Get project path
|
||||
let projectPath;
|
||||
try {
|
||||
projectPath = await extractProjectDirectory(projectName);
|
||||
} catch (error) {
|
||||
|
||||
const projectPath = await resolveProjectPathFromId(projectId);
|
||||
if (!projectPath) {
|
||||
return res.status(404).json({
|
||||
error: 'Project not found',
|
||||
message: `Project "${projectName}" does not exist`
|
||||
message: `Project "${projectId}" does not exist`
|
||||
});
|
||||
}
|
||||
|
||||
@@ -859,13 +859,13 @@ router.post('/parse-prd/:projectName', async (req, res) => {
|
||||
// Broadcast task update via WebSocket
|
||||
if (req.app.locals.wss) {
|
||||
broadcastTaskMasterTasksUpdate(
|
||||
req.app.locals.wss,
|
||||
projectName
|
||||
req.app.locals.wss,
|
||||
projectId
|
||||
);
|
||||
}
|
||||
|
||||
res.json({
|
||||
projectName,
|
||||
projectId,
|
||||
projectPath,
|
||||
prdFile: fileName,
|
||||
message: 'PRD parsed and tasks generated successfully',
|
||||
@@ -1340,12 +1340,12 @@ Description of the business problem, data sources, and expected insights.
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/taskmaster/apply-template/:projectName
|
||||
* POST /api/taskmaster/apply-template/:projectId
|
||||
* Apply a PRD template to create a new PRD file
|
||||
*/
|
||||
router.post('/apply-template/:projectName', async (req, res) => {
|
||||
router.post('/apply-template/:projectId', async (req, res) => {
|
||||
try {
|
||||
const { projectName } = req.params;
|
||||
const { projectId } = req.params;
|
||||
const { templateId, fileName = 'prd.txt', customizations = {} } = req.body;
|
||||
|
||||
if (!templateId) {
|
||||
@@ -1355,14 +1355,11 @@ router.post('/apply-template/:projectName', async (req, res) => {
|
||||
});
|
||||
}
|
||||
|
||||
// Get project path
|
||||
let projectPath;
|
||||
try {
|
||||
projectPath = await extractProjectDirectory(projectName);
|
||||
} catch (error) {
|
||||
const projectPath = await resolveProjectPathFromId(projectId);
|
||||
if (!projectPath) {
|
||||
return res.status(404).json({
|
||||
error: 'Project not found',
|
||||
message: `Project "${projectName}" does not exist`
|
||||
message: `Project "${projectId}" does not exist`
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1401,7 +1398,7 @@ router.post('/apply-template/:projectName', async (req, res) => {
|
||||
await fsPromises.writeFile(filePath, content, 'utf8');
|
||||
|
||||
res.json({
|
||||
projectName,
|
||||
projectId,
|
||||
projectPath,
|
||||
templateId,
|
||||
templateName: template.name,
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import express from 'express';
|
||||
import { userDb } from '../database/db.js';
|
||||
import { userDb } from '../modules/database/index.js';
|
||||
import { authenticateToken } from '../middleware/auth.js';
|
||||
import { getSystemGitConfig } from '../utils/gitConfig.js';
|
||||
import { spawn } from 'child_process';
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import webPush from 'web-push';
|
||||
import { notificationPreferencesDb, pushSubscriptionsDb, sessionNamesDb } from '../database/db.js';
|
||||
|
||||
import { notificationPreferencesDb, pushSubscriptionsDb, sessionsDb } from '../modules/database/index.js';
|
||||
|
||||
const KIND_TO_PREF_KEY = {
|
||||
action_required: 'actionRequired',
|
||||
@@ -107,7 +108,7 @@ function resolveSessionName(event) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return normalizeSessionName(sessionNamesDb.getName(event.sessionId, event.provider));
|
||||
return normalizeSessionName(sessionsDb.getSessionName(event.sessionId, event.provider));
|
||||
}
|
||||
|
||||
function buildPushBody(event) {
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import webPush from 'web-push';
|
||||
import { db } from '../database/db.js';
|
||||
import { getConnection } from '../modules/database/connection.js';
|
||||
|
||||
let cachedKeys = null;
|
||||
const db = getConnection();
|
||||
|
||||
function ensureVapidKeys() {
|
||||
if (cachedKeys) return cachedKeys;
|
||||
|
||||
@@ -9,6 +9,7 @@ import type {
|
||||
UpsertProviderMcpServerInput,
|
||||
} from '@/shared/types.js';
|
||||
|
||||
//----------------- PROVIDER CONTRACT INTERFACES ------------
|
||||
/**
|
||||
* Main provider contract for CLI and SDK integrations.
|
||||
*
|
||||
@@ -20,11 +21,16 @@ export interface IProvider {
|
||||
readonly mcp: IProviderMcp;
|
||||
readonly auth: IProviderAuth;
|
||||
readonly sessions: IProviderSessions;
|
||||
readonly sessionSynchronizer: IProviderSessionSynchronizer;
|
||||
}
|
||||
|
||||
|
||||
// ---------------------------
|
||||
//----------------- PROVIDER AUTH INTERFACE ------------
|
||||
/**
|
||||
* Auth contract for one provider.
|
||||
*
|
||||
* Implementations should return a complete installation/authentication status
|
||||
* without throwing for normal "not installed" or "not authenticated" states.
|
||||
*/
|
||||
export interface IProviderAuth {
|
||||
/**
|
||||
@@ -33,8 +39,13 @@ export interface IProviderAuth {
|
||||
getStatus(): Promise<ProviderAuthStatus>;
|
||||
}
|
||||
|
||||
// ---------------------------
|
||||
//----------------- PROVIDER MCP INTERFACE ------------
|
||||
/**
|
||||
* MCP contract for one provider.
|
||||
*
|
||||
* Implementations must map provider-native MCP config formats to shared
|
||||
* `ProviderMcpServer` records used by routes and frontend state.
|
||||
*/
|
||||
export interface IProviderMcp {
|
||||
listServers(options?: { workspacePath?: string }): Promise<Record<McpScope, ProviderMcpServer[]>>;
|
||||
@@ -45,10 +56,37 @@ export interface IProviderMcp {
|
||||
): Promise<{ removed: boolean; provider: LLMProvider; name: string; scope: McpScope }>;
|
||||
}
|
||||
|
||||
// ---------------------------
|
||||
//----------------- PROVIDER SESSION INTERFACE ------------
|
||||
/**
|
||||
* Session/history contract for one provider.
|
||||
*
|
||||
* Implementations normalize provider-specific events and message history into
|
||||
* shared transport shapes consumed by API routes and realtime streams.
|
||||
*/
|
||||
export interface IProviderSessions {
|
||||
normalizeMessage(raw: unknown, sessionId: string | null): NormalizedMessage[];
|
||||
fetchHistory(sessionId: string, options?: FetchHistoryOptions): Promise<FetchHistoryResult>;
|
||||
}
|
||||
|
||||
// ---------------------------
|
||||
//----------------- PROVIDER SESSION SYNCHRONIZER INTERFACE ------------
|
||||
/**
|
||||
* Session indexing contract for one provider.
|
||||
*
|
||||
* Implementations scan provider-specific session artifacts on disk and upsert
|
||||
* normalized session metadata into the database. The service layer uses this
|
||||
* interface for both full rescans and single-file incremental sync triggered
|
||||
* by filesystem watcher events.
|
||||
*/
|
||||
export interface IProviderSessionSynchronizer {
|
||||
/**
|
||||
* Scans provider session artifacts and upserts discovered sessions into DB.
|
||||
*/
|
||||
synchronize(since?: Date): Promise<number>;
|
||||
|
||||
/**
|
||||
* Parses and upserts one provider artifact file without running a full scan.
|
||||
*/
|
||||
synchronizeFile(filePath: string): Promise<string | null>;
|
||||
}
|
||||
|
||||
@@ -1,18 +1,77 @@
|
||||
// -------------- HTTP API response shapes for the server, shared across modules --------------
|
||||
import type { IncomingMessage } from 'node:http';
|
||||
|
||||
//----------------- HTTP RESPONSE SHAPES ------------
|
||||
/**
|
||||
* Canonical success envelope used by backend APIs that return a structured payload.
|
||||
*
|
||||
* Use this for route handlers that need a stable `success/data` shape so frontend
|
||||
* consumers can parse responses consistently across endpoints.
|
||||
*/
|
||||
export type ApiSuccessShape<TData = unknown> = {
|
||||
success: true;
|
||||
data: TData;
|
||||
};
|
||||
|
||||
/**
|
||||
* Generic plain-object record used when parsing loosely typed JSON payloads.
|
||||
*
|
||||
* Use this only after runtime shape checks, not as a replacement for validated
|
||||
* domain models.
|
||||
*/
|
||||
export type AnyRecord = Record<string, any>;
|
||||
|
||||
// ---------------------------------------------------------------------------------------------
|
||||
// ---------------------------
|
||||
//----------------- WEBSOCKET TRANSPORT TYPES ------------
|
||||
/**
|
||||
* Minimal websocket client contract used by backend broadcaster services.
|
||||
*
|
||||
* Any transport object added to `connectedClients` must implement these two
|
||||
* members so shared services can safely send JSON strings and check whether the
|
||||
* socket is still open before broadcasting.
|
||||
*/
|
||||
export type RealtimeClientConnection = {
|
||||
readyState: number;
|
||||
send(data: string): void;
|
||||
};
|
||||
|
||||
/**
|
||||
* Authenticated user payload attached to websocket upgrade requests.
|
||||
*
|
||||
* Platform and OSS auth flows currently use either `id` or `userId`; both are
|
||||
* represented here so websocket handlers can resolve a stable writer user id.
|
||||
*/
|
||||
export type AuthenticatedWebSocketUser = {
|
||||
id?: string | number;
|
||||
userId?: string | number;
|
||||
username?: string;
|
||||
[key: string]: unknown;
|
||||
};
|
||||
|
||||
/**
|
||||
* HTTP upgrade request shape after websocket authentication succeeds.
|
||||
*
|
||||
* `verifyClient` populates `request.user` with the authenticated payload, and
|
||||
* downstream websocket handlers rely on this extended request type.
|
||||
*/
|
||||
export type AuthenticatedWebSocketRequest = IncomingMessage & {
|
||||
user?: AuthenticatedWebSocketUser;
|
||||
};
|
||||
|
||||
// ---------------------------
|
||||
//----------------- PROVIDER MESSAGE MODEL ------------
|
||||
/**
|
||||
* Providers supported by the unified server runtime.
|
||||
*
|
||||
* Use this as the source of truth whenever a function or payload needs to identify
|
||||
* a specific LLM integration.
|
||||
*/
|
||||
export type LLMProvider = 'claude' | 'codex' | 'gemini' | 'cursor';
|
||||
|
||||
// ---------------------------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Message/event variants emitted by provider adapters and normalized transports.
|
||||
*
|
||||
* Keep this union in sync with event kinds produced by provider session adapters.
|
||||
*/
|
||||
export type MessageKind =
|
||||
| 'text'
|
||||
| 'tool_use'
|
||||
@@ -30,11 +89,10 @@ export type MessageKind =
|
||||
| 'task_notification';
|
||||
|
||||
/**
|
||||
* Provider-neutral message event emitted over REST and realtime transports.
|
||||
* Provider-neutral message envelope used in REST responses and realtime channels.
|
||||
*
|
||||
* Providers all produce their own native SDK/CLI event shapes, so this type keeps
|
||||
* the common envelope strict while allowing provider-specific details to ride
|
||||
* along as optional properties.
|
||||
* Every provider-specific message must be converted into this shape before being
|
||||
* emitted outside provider-specific modules.
|
||||
*/
|
||||
export type NormalizedMessage = {
|
||||
id: string;
|
||||
@@ -73,21 +131,21 @@ export type NormalizedMessage = {
|
||||
};
|
||||
|
||||
/**
|
||||
* Pagination and provider lookup options for reading persisted session history.
|
||||
* Shared options used to fetch historical provider messages.
|
||||
*
|
||||
* Consumers should pass provider-specific lookup hints (`projectPath`) only
|
||||
* when the selected provider requires them.
|
||||
*/
|
||||
export type FetchHistoryOptions = {
|
||||
/** Claude project folder name. Required by Claude history lookup. */
|
||||
projectName?: string;
|
||||
/** Absolute workspace path. Required by Cursor to compute its chat hash. */
|
||||
projectPath?: string;
|
||||
/** Page size. `null` means all messages. */
|
||||
limit?: number | null;
|
||||
/** Pagination offset from the newest messages. */
|
||||
offset?: number;
|
||||
};
|
||||
|
||||
/**
|
||||
* Provider-neutral history result returned by the unified messages endpoint.
|
||||
* Standardized response payload returned from provider history readers.
|
||||
*
|
||||
* Use this as the contract for APIs that return paginated conversation history.
|
||||
*/
|
||||
export type FetchHistoryResult = {
|
||||
messages: NormalizedMessage[];
|
||||
@@ -98,21 +156,40 @@ export type FetchHistoryResult = {
|
||||
tokenUsage?: unknown;
|
||||
};
|
||||
|
||||
// ---------------------------------------------------------------------------------------------
|
||||
|
||||
// ---------------------------
|
||||
//----------------- SHARED ERROR TYPES ------------
|
||||
/**
|
||||
* Optional metadata used when constructing application-level errors.
|
||||
*
|
||||
* `statusCode` should reflect the HTTP response status, while `code` identifies
|
||||
* the stable machine-readable error category.
|
||||
*/
|
||||
export type AppErrorOptions = {
|
||||
code?: string;
|
||||
statusCode?: number;
|
||||
details?: unknown;
|
||||
};
|
||||
|
||||
// -------------------- MCP related shared types --------------------
|
||||
// ---------------------------
|
||||
//----------------- MCP TYPES ------------
|
||||
/**
|
||||
* Scope where an MCP server definition is stored and resolved.
|
||||
*
|
||||
* `user` is global for a user account, `local` is provider-local, and `project`
|
||||
* is tied to a specific project path.
|
||||
*/
|
||||
export type McpScope = 'user' | 'local' | 'project';
|
||||
|
||||
/**
|
||||
* Transport protocol used by an MCP server definition.
|
||||
*/
|
||||
export type McpTransport = 'stdio' | 'http' | 'sse';
|
||||
|
||||
/**
|
||||
* Provider MCP server descriptor normalized for frontend consumption.
|
||||
* Normalized MCP server model exposed to frontend and route handlers.
|
||||
*
|
||||
* Provider adapters should map provider-native config to this structure before
|
||||
* returning results.
|
||||
*/
|
||||
export type ProviderMcpServer = {
|
||||
provider: LLMProvider;
|
||||
@@ -131,7 +208,10 @@ export type ProviderMcpServer = {
|
||||
};
|
||||
|
||||
/**
|
||||
* Shared payload shape for MCP server create/update operations.
|
||||
* Payload for create/update MCP server operations.
|
||||
*
|
||||
* Routes and services should accept this type, validate it, and then persist it
|
||||
* through provider-specific MCP repositories.
|
||||
*/
|
||||
export type UpsertProviderMcpServerInput = {
|
||||
name: string;
|
||||
@@ -149,18 +229,13 @@ export type UpsertProviderMcpServerInput = {
|
||||
envHttpHeaders?: Record<string, string>;
|
||||
};
|
||||
|
||||
// ---------------------------------------------------------------------------------------------
|
||||
|
||||
// -------------------- Provider auth status types --------------------
|
||||
// ---------------------------
|
||||
//----------------- PROVIDER AUTH TYPES ------------
|
||||
/**
|
||||
* Result of a provider status check (installation + authentication).
|
||||
* Authentication status result returned by provider health checks.
|
||||
*
|
||||
* installed - Whether the provider's CLI/SDK is available
|
||||
* provider - Provider id the status belongs to
|
||||
* authenticated - Whether valid credentials exist
|
||||
* email - User email or auth method identifier
|
||||
* method - Auth method (e.g. 'api_key', 'credentials_file')
|
||||
* [error] - Error message if not installed or not authenticated
|
||||
* This shape is consumed by settings/status endpoints to report installation and
|
||||
* credential state for each provider.
|
||||
*/
|
||||
export type ProviderAuthStatus = {
|
||||
installed: boolean;
|
||||
@@ -170,3 +245,83 @@ export type ProviderAuthStatus = {
|
||||
method: string | null;
|
||||
error?: string;
|
||||
};
|
||||
|
||||
// ---------------------------
|
||||
//----------------- SHARED DATABASE CREDENTIAL TYPES ------------
|
||||
/**
|
||||
* Safe credential view returned by credential listing APIs.
|
||||
*
|
||||
* This intentionally excludes the raw credential secret while still exposing
|
||||
* metadata needed for UI rendering and management operations.
|
||||
*/
|
||||
export type CredentialPublicRow = {
|
||||
id: number;
|
||||
credential_name: string;
|
||||
credential_type: string;
|
||||
description: string | null;
|
||||
created_at: string;
|
||||
is_active: number;
|
||||
};
|
||||
|
||||
/**
|
||||
* Result returned after creating a credential record.
|
||||
*
|
||||
* Use this return shape when callers need the created id and display metadata,
|
||||
* but must never receive the stored secret value.
|
||||
*/
|
||||
export type CreateCredentialResult = {
|
||||
id: number | bigint;
|
||||
credentialName: string;
|
||||
credentialType: string;
|
||||
};
|
||||
|
||||
// ---------------------------
|
||||
//----------------- PROJECT PERSISTENCE TYPES ------------
|
||||
/**
|
||||
* Canonical project row shape returned by the projects repository.
|
||||
*
|
||||
* Use this type whenever backend services need to pass around one database
|
||||
* project record without leaking raw SQL row typing across modules.
|
||||
*/
|
||||
export type ProjectRepositoryRow = {
|
||||
project_id: string;
|
||||
project_path: string;
|
||||
custom_project_name: string | null;
|
||||
isStarred: number;
|
||||
isArchived: number;
|
||||
};
|
||||
|
||||
/**
|
||||
* Result category returned by `projectsDb.createProjectPath`.
|
||||
*
|
||||
* `created` means a fresh row was inserted, `reactivated_archived` means an
|
||||
* existing archived path was accepted and updated, and `active_conflict` means
|
||||
* an already-active path blocked project creation.
|
||||
*/
|
||||
export type CreateProjectPathOutcome =
|
||||
| 'created'
|
||||
| 'reactivated_archived'
|
||||
| 'active_conflict';
|
||||
|
||||
/**
|
||||
* Structured result returned by project-path upsert operations.
|
||||
*
|
||||
* Services should use this result to decide whether a request succeeded,
|
||||
* should return a conflict, or needs follow-up retrieval of row metadata.
|
||||
*/
|
||||
export type CreateProjectPathResult = {
|
||||
outcome: CreateProjectPathOutcome;
|
||||
project: ProjectRepositoryRow | null;
|
||||
};
|
||||
|
||||
/**
|
||||
* Validation result for user-supplied workspace/project paths.
|
||||
*
|
||||
* `resolvedPath` is present only when validation succeeds. `error` is present
|
||||
* only when validation fails and is suitable for user-facing diagnostics.
|
||||
*/
|
||||
export type WorkspacePathValidationResult = {
|
||||
valid: boolean;
|
||||
resolvedPath?: string;
|
||||
error?: string;
|
||||
};
|
||||
|
||||
@@ -1,7 +1,19 @@
|
||||
|
||||
import { randomUUID } from 'node:crypto';
|
||||
import { mkdir, readFile, writeFile } from 'node:fs/promises';
|
||||
import fs from 'node:fs';
|
||||
import {
|
||||
access,
|
||||
lstat,
|
||||
mkdir,
|
||||
readFile,
|
||||
readdir,
|
||||
readlink,
|
||||
realpath,
|
||||
stat,
|
||||
writeFile,
|
||||
} from 'node:fs/promises';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
import readline from 'node:readline';
|
||||
|
||||
import type { NextFunction, Request, RequestHandler, Response } from 'express';
|
||||
|
||||
@@ -10,8 +22,17 @@ import type {
|
||||
ApiSuccessShape,
|
||||
AppErrorOptions,
|
||||
NormalizedMessage,
|
||||
WorkspacePathValidationResult,
|
||||
} from '@/shared/types.js';
|
||||
|
||||
//----------------- NORMALIZED MESSAGE HELPER INPUT TYPES ------------
|
||||
/**
|
||||
* Input payload accepted by `createNormalizedMessage`.
|
||||
*
|
||||
* Callers provide provider-specific fields plus the required `kind/provider`
|
||||
* pair; this helper fills missing envelope fields (`id`, `sessionId`,
|
||||
* `timestamp`) in a consistent way.
|
||||
*/
|
||||
type NormalizedMessageInput =
|
||||
{
|
||||
kind: NormalizedMessage['kind'];
|
||||
@@ -21,6 +42,14 @@ type NormalizedMessageInput =
|
||||
timestamp?: string | null;
|
||||
} & Record<string, unknown>;
|
||||
|
||||
// ---------------------------
|
||||
//----------------- HTTP HANDLER UTILITIES ------------
|
||||
/**
|
||||
* Wraps arbitrary data in the standard API success envelope.
|
||||
*
|
||||
* Use this helper in route handlers to keep successful JSON responses consistent
|
||||
* across endpoints.
|
||||
*/
|
||||
export function createApiSuccessResponse<TData>(
|
||||
data: TData,
|
||||
): ApiSuccessShape<TData> {
|
||||
@@ -30,6 +59,12 @@ export function createApiSuccessResponse<TData>(
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts an async Express handler into a standard `RequestHandler` and routes
|
||||
* rejected promises to Express error middleware.
|
||||
*
|
||||
* Use this to avoid repeating `try/catch(next)` in every async route.
|
||||
*/
|
||||
export function asyncHandler(
|
||||
handler: (req: Request, res: Response, next: NextFunction) => Promise<unknown>
|
||||
): RequestHandler {
|
||||
@@ -38,7 +73,14 @@ export function asyncHandler(
|
||||
};
|
||||
}
|
||||
|
||||
// --------- Global app error class for consistent error handling across the server ---------
|
||||
// ---------------------------
|
||||
//----------------- SHARED ERROR UTILITIES ------------
|
||||
/**
|
||||
* Shared application error with HTTP status and machine-readable code metadata.
|
||||
*
|
||||
* Throw this from service/route layers when the caller should receive a
|
||||
* controlled error response rather than a generic 500.
|
||||
*/
|
||||
export class AppError extends Error {
|
||||
readonly code: string;
|
||||
readonly statusCode: number;
|
||||
@@ -53,9 +95,226 @@ export class AppError extends Error {
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------------------------------
|
||||
// ---------------------------
|
||||
//----------------- WORKSPACE PATH VALIDATION UTILITIES ------------
|
||||
/**
|
||||
* Root directory that all workspace/project paths must stay under.
|
||||
*
|
||||
* This is resolved from `WORKSPACES_ROOT` when configured; otherwise it falls
|
||||
* back to the current user's home directory.
|
||||
*/
|
||||
export const WORKSPACES_ROOT = process.env.WORKSPACES_ROOT || os.homedir();
|
||||
|
||||
// ------------------------ Normalized provider message helpers ------------------------
|
||||
/**
|
||||
* System-critical paths that must never be used as workspace roots.
|
||||
*
|
||||
* The validation helper blocks these values directly and also blocks paths
|
||||
* nested under them (with explicit allow-list exceptions where necessary).
|
||||
*/
|
||||
export const FORBIDDEN_WORKSPACE_PATHS = [
|
||||
// Unix
|
||||
'/',
|
||||
'/etc',
|
||||
'/bin',
|
||||
'/sbin',
|
||||
'/usr',
|
||||
'/dev',
|
||||
'/proc',
|
||||
'/sys',
|
||||
'/var',
|
||||
'/boot',
|
||||
'/root',
|
||||
'/lib',
|
||||
'/lib64',
|
||||
'/opt',
|
||||
'/tmp',
|
||||
'/run',
|
||||
// Windows
|
||||
'C:\\Windows',
|
||||
'C:\\Program Files',
|
||||
'C:\\Program Files (x86)',
|
||||
'C:\\ProgramData',
|
||||
'C:\\System Volume Information',
|
||||
'C:\\$Recycle.Bin',
|
||||
];
|
||||
|
||||
function stripWindowsLongPathPrefix(inputPath: string): string {
|
||||
if (inputPath.startsWith('\\\\?\\UNC\\')) {
|
||||
return `\\\\${inputPath.slice('\\\\?\\UNC\\'.length)}`;
|
||||
}
|
||||
|
||||
if (inputPath.startsWith('\\\\?\\')) {
|
||||
return inputPath.slice('\\\\?\\'.length);
|
||||
}
|
||||
|
||||
return inputPath;
|
||||
}
|
||||
|
||||
function shouldUseWindowsPathNormalization(inputPath: string): boolean {
|
||||
if (process.platform === 'win32') {
|
||||
return true;
|
||||
}
|
||||
|
||||
return inputPath.startsWith('\\\\') || /^[a-zA-Z]:([\\/]|$)/.test(inputPath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Canonicalizes project/workspace paths for stable DB keys and comparisons.
|
||||
*
|
||||
* Normalization rules:
|
||||
* - trim whitespace
|
||||
* - strip Windows long-path prefixes (`\\?\` and `\\?\UNC\`)
|
||||
* - normalize path separators and dot segments
|
||||
* - trim trailing separators except for filesystem roots
|
||||
*/
|
||||
export function normalizeProjectPath(inputPath: string): string {
|
||||
if (typeof inputPath !== 'string') {
|
||||
return '';
|
||||
}
|
||||
|
||||
const trimmed = inputPath.trim();
|
||||
if (!trimmed) {
|
||||
return '';
|
||||
}
|
||||
|
||||
const withoutLongPrefix = stripWindowsLongPathPrefix(trimmed);
|
||||
const useWindowsPathRules = shouldUseWindowsPathNormalization(withoutLongPrefix);
|
||||
const normalized = useWindowsPathRules
|
||||
? path.win32.normalize(withoutLongPrefix)
|
||||
: path.posix.normalize(withoutLongPrefix);
|
||||
|
||||
if (!normalized) {
|
||||
return '';
|
||||
}
|
||||
|
||||
const parser = useWindowsPathRules ? path.win32 : path.posix;
|
||||
const root = parser.parse(normalized).root;
|
||||
if (normalized === root) {
|
||||
return normalized;
|
||||
}
|
||||
|
||||
return normalized.replace(/[\\/]+$/, '');
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates that a user-supplied workspace path is safe to use.
|
||||
*
|
||||
* Call this before any filesystem mutation that creates or registers projects.
|
||||
* The function resolves symlinks, enforces `WORKSPACES_ROOT` containment, and
|
||||
* blocks known system directories.
|
||||
*/
|
||||
export async function validateWorkspacePath(requestedPath: string): Promise<WorkspacePathValidationResult> {
|
||||
try {
|
||||
const normalizedRequestedPath = normalizeProjectPath(requestedPath);
|
||||
if (!normalizedRequestedPath) {
|
||||
return {
|
||||
valid: false,
|
||||
error: 'Workspace path is required',
|
||||
};
|
||||
}
|
||||
|
||||
const absolutePath = path.resolve(normalizedRequestedPath);
|
||||
const normalizedPath = normalizeProjectPath(absolutePath);
|
||||
|
||||
if (FORBIDDEN_WORKSPACE_PATHS.includes(normalizedPath) || normalizedPath === '/') {
|
||||
return {
|
||||
valid: false,
|
||||
error: 'Cannot use system-critical directories as workspace locations',
|
||||
};
|
||||
}
|
||||
|
||||
for (const forbiddenPath of FORBIDDEN_WORKSPACE_PATHS) {
|
||||
const normalizedForbiddenPath = normalizeProjectPath(forbiddenPath);
|
||||
if (
|
||||
normalizedPath === normalizedForbiddenPath
|
||||
|| normalizedPath.startsWith(`${normalizedForbiddenPath}${path.sep}`)
|
||||
) {
|
||||
// Allow specific user-writable folders under /var.
|
||||
if (
|
||||
normalizedForbiddenPath === '/var'
|
||||
&& (normalizedPath.startsWith('/var/tmp') || normalizedPath.startsWith('/var/folders'))
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
|
||||
return {
|
||||
valid: false,
|
||||
error: `Cannot create workspace in system directory: ${forbiddenPath}`,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
let resolvedPath = normalizeProjectPath(absolutePath);
|
||||
try {
|
||||
await access(absolutePath);
|
||||
resolvedPath = normalizeProjectPath(await realpath(absolutePath));
|
||||
} catch (error) {
|
||||
const fileError = error as NodeJS.ErrnoException;
|
||||
if (fileError.code !== 'ENOENT') {
|
||||
throw fileError;
|
||||
}
|
||||
|
||||
const parentPath = path.dirname(absolutePath);
|
||||
try {
|
||||
const parentRealPath = await realpath(parentPath);
|
||||
resolvedPath = normalizeProjectPath(path.join(parentRealPath, path.basename(absolutePath)));
|
||||
} catch (parentError) {
|
||||
const parentFileError = parentError as NodeJS.ErrnoException;
|
||||
if (parentFileError.code !== 'ENOENT') {
|
||||
throw parentFileError;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const resolvedWorkspaceRoot = normalizeProjectPath(await realpath(WORKSPACES_ROOT));
|
||||
if (
|
||||
!resolvedPath.startsWith(`${resolvedWorkspaceRoot}${path.sep}`)
|
||||
&& resolvedPath !== resolvedWorkspaceRoot
|
||||
) {
|
||||
return {
|
||||
valid: false,
|
||||
error: `Workspace path must be within the allowed workspace root: ${WORKSPACES_ROOT}`,
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
await access(absolutePath);
|
||||
const pathStats = await lstat(absolutePath);
|
||||
if (pathStats.isSymbolicLink()) {
|
||||
const symlinkTarget = await readlink(absolutePath);
|
||||
const resolvedSymlinkPath = path.resolve(path.dirname(absolutePath), symlinkTarget);
|
||||
const realSymlinkPath = await realpath(resolvedSymlinkPath);
|
||||
if (
|
||||
!realSymlinkPath.startsWith(`${resolvedWorkspaceRoot}${path.sep}`)
|
||||
&& realSymlinkPath !== resolvedWorkspaceRoot
|
||||
) {
|
||||
return {
|
||||
valid: false,
|
||||
error: 'Symlink target is outside the allowed workspace root',
|
||||
};
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
const fileError = error as NodeJS.ErrnoException;
|
||||
if (fileError.code !== 'ENOENT') {
|
||||
throw fileError;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
valid: true,
|
||||
resolvedPath,
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
valid: false,
|
||||
error: `Path validation failed: ${(error as Error).message}`,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------
|
||||
//----------------- NORMALIZED PROVIDER MESSAGE UTILITIES ------------
|
||||
/**
|
||||
* Generates a stable unique id for normalized provider messages.
|
||||
*/
|
||||
@@ -80,9 +339,8 @@ export function createNormalizedMessage(fields: NormalizedMessageInput): Normali
|
||||
};
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------------------------------
|
||||
|
||||
// ------------------------ The following are mainly for provider MCP runtimes ------------------------
|
||||
// ---------------------------
|
||||
//----------------- MCP CONFIG PARSING UTILITIES ------------
|
||||
/**
|
||||
* Safely narrows an unknown value to a plain object record.
|
||||
*
|
||||
@@ -154,6 +412,62 @@ export const readStringRecord = (value: unknown): Record<string, string> | undef
|
||||
return Object.keys(normalized).length > 0 ? normalized : undefined;
|
||||
};
|
||||
|
||||
// ---------------------------
|
||||
//----------------- WEBSOCKET PAYLOAD PARSING UTILITIES ------------
|
||||
/**
|
||||
* Parses one websocket message payload into a plain JSON object record.
|
||||
*
|
||||
* Use this in realtime handlers that receive raw websocket payloads as `string`,
|
||||
* `Buffer`, `ArrayBuffer`, or chunk arrays. The helper converts supported
|
||||
* payload formats to UTF-8 text, parses JSON, and returns only object payloads.
|
||||
* Primitive/array/invalid payloads return `null` so callers can handle bad input
|
||||
* without throwing from deeply nested message handlers.
|
||||
*/
|
||||
export const parseIncomingJsonObject = (payload: unknown): AnyRecord | null => {
|
||||
let text: string | null = null;
|
||||
|
||||
if (typeof payload === 'string') {
|
||||
text = payload;
|
||||
} else if (Buffer.isBuffer(payload)) {
|
||||
text = payload.toString('utf8');
|
||||
} else if (payload instanceof ArrayBuffer) {
|
||||
text = Buffer.from(payload).toString('utf8');
|
||||
} else if (Array.isArray(payload)) {
|
||||
const buffers = payload
|
||||
.map((entry) => {
|
||||
if (Buffer.isBuffer(entry)) {
|
||||
return entry;
|
||||
}
|
||||
|
||||
if (entry instanceof ArrayBuffer) {
|
||||
return Buffer.from(entry);
|
||||
}
|
||||
|
||||
if (ArrayBuffer.isView(entry)) {
|
||||
return Buffer.from(entry.buffer, entry.byteOffset, entry.byteLength);
|
||||
}
|
||||
|
||||
return null;
|
||||
})
|
||||
.filter((entry): entry is Buffer => entry !== null);
|
||||
|
||||
if (buffers.length > 0) {
|
||||
text = Buffer.concat(buffers).toString('utf8');
|
||||
}
|
||||
}
|
||||
|
||||
if (typeof text !== 'string' || text.trim().length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
const parsed = JSON.parse(text) as unknown;
|
||||
return readObjectRecord(parsed);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Reads a JSON config file and guarantees a plain object result.
|
||||
*
|
||||
@@ -189,5 +503,167 @@ export const writeJsonConfig = async (filePath: string, data: Record<string, unk
|
||||
await writeFile(filePath, `${JSON.stringify(data, null, 2)}\n`, 'utf8');
|
||||
};
|
||||
|
||||
// -------------------------------------------------------------------------------------------
|
||||
// ---------------------------
|
||||
//----------------- SESSION SYNCHRONIZER TITLE HELPERS ------------
|
||||
/**
|
||||
* Produces a compact session title suitable for UI rendering and DB storage.
|
||||
*
|
||||
* Use this when converting provider-native names into a consistent title value.
|
||||
* The helper collapses repeated whitespace, trims the result, and truncates it
|
||||
* to 120 characters so every provider writes stable and bounded metadata.
|
||||
* If the normalized input is empty, it returns the supplied fallback title.
|
||||
*/
|
||||
export function normalizeSessionName(rawValue: string | undefined, fallback: string): string {
|
||||
const normalized = (rawValue ?? '').replace(/\s+/g, ' ').trim();
|
||||
if (!normalized) {
|
||||
return fallback;
|
||||
}
|
||||
|
||||
return normalized.slice(0, 120);
|
||||
}
|
||||
|
||||
// ---------------------------
|
||||
//----------------- SESSION SYNCHRONIZER FILESYSTEM HELPERS ------------
|
||||
/**
|
||||
* Recursively discovers files that match one extension, with optional incremental filtering.
|
||||
*
|
||||
* Provider synchronizers call this to find transcript artifacts under provider
|
||||
* home directories. Pass `lastScanAt` to include only files created after the
|
||||
* previous scan, or pass `null` to perform a full rescan. Missing directories
|
||||
* are treated as empty because not every provider exists on every machine.
|
||||
*/
|
||||
export async function findFilesRecursivelyCreatedAfter(
|
||||
rootDir: string,
|
||||
extension: string,
|
||||
lastScanAt: Date | null,
|
||||
fileList: string[] = []
|
||||
): Promise<string[]> {
|
||||
try {
|
||||
const entries = await readdir(rootDir, { withFileTypes: true });
|
||||
for (const entry of entries) {
|
||||
const fullPath = path.join(rootDir, entry.name);
|
||||
|
||||
if (entry.isDirectory()) {
|
||||
await findFilesRecursivelyCreatedAfter(fullPath, extension, lastScanAt, fileList);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!entry.isFile() || !entry.name.endsWith(extension)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!lastScanAt) {
|
||||
fileList.push(fullPath);
|
||||
continue;
|
||||
}
|
||||
|
||||
const fileStat = await stat(fullPath);
|
||||
if (fileStat.birthtime > lastScanAt) {
|
||||
fileList.push(fullPath);
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Missing provider folders are expected in first-run or partial setups.
|
||||
}
|
||||
|
||||
return fileList;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads file creation/update timestamps and maps them to DB-friendly ISO strings.
|
||||
*
|
||||
* Session indexers use this to persist `created_at` and `updated_at` metadata
|
||||
* when upserting sessions. If the file cannot be read, an empty object is
|
||||
* returned so indexing can continue for other files.
|
||||
*/
|
||||
export async function readFileTimestamps(
|
||||
filePath: string
|
||||
): Promise<{ createdAt?: string; updatedAt?: string }> {
|
||||
try {
|
||||
const fileStat = await stat(filePath);
|
||||
return {
|
||||
createdAt: fileStat.birthtime.toISOString(),
|
||||
updatedAt: fileStat.mtime.toISOString(),
|
||||
};
|
||||
} catch {
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------
|
||||
//----------------- SESSION SYNCHRONIZER JSONL PARSING HELPERS ------------
|
||||
/**
|
||||
* Builds a first-seen key/value lookup map from a JSONL file.
|
||||
*
|
||||
* Use this for provider index files where session id -> display name metadata
|
||||
* is stored line-by-line. The first value for each key wins, preserving the
|
||||
* earliest known label while avoiding repeated map overwrites.
|
||||
*/
|
||||
export async function buildLookupMap(
|
||||
filePath: string,
|
||||
keyField: string,
|
||||
valueField: string
|
||||
): Promise<Map<string, string>> {
|
||||
const lookup = new Map<string, string>();
|
||||
|
||||
try {
|
||||
const fileStream = fs.createReadStream(filePath);
|
||||
const lineReader = readline.createInterface({ input: fileStream, crlfDelay: Infinity });
|
||||
|
||||
for await (const line of lineReader) {
|
||||
const trimmed = line.trim();
|
||||
if (!trimmed) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const parsed = JSON.parse(trimmed) as Record<string, unknown>;
|
||||
const key = parsed[keyField];
|
||||
const value = parsed[valueField];
|
||||
|
||||
if (typeof key === 'string' && typeof value === 'string' && !lookup.has(key)) {
|
||||
lookup.set(key, value);
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Missing or unreadable lookup files should not block session sync.
|
||||
}
|
||||
|
||||
return lookup;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads a JSONL file and returns the first extracted payload that matches caller criteria.
|
||||
*
|
||||
* The caller supplies an `extractor` that validates provider-specific row
|
||||
* shapes. This helper centralizes line-by-line parsing and lets indexers stop
|
||||
* scanning as soon as one valid row is found.
|
||||
*/
|
||||
export async function extractFirstValidJsonlData<T>(
|
||||
filePath: string,
|
||||
extractor: (parsedJson: unknown) => T | null | undefined
|
||||
): Promise<T | null> {
|
||||
try {
|
||||
const fileStream = fs.createReadStream(filePath);
|
||||
const lineReader = readline.createInterface({ input: fileStream, crlfDelay: Infinity });
|
||||
|
||||
for await (const line of lineReader) {
|
||||
const trimmed = line.trim();
|
||||
if (!trimmed) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const parsed = JSON.parse(trimmed);
|
||||
const extracted = extractor(parsed);
|
||||
if (extracted) {
|
||||
lineReader.close();
|
||||
fileStream.close();
|
||||
return extracted;
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Ignore malformed or missing artifacts so full scans keep progressing.
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
@@ -7,20 +7,25 @@
|
||||
*/
|
||||
|
||||
/**
|
||||
* Broadcast TaskMaster project update to all connected clients
|
||||
* Broadcast TaskMaster project update to all connected clients.
|
||||
*
|
||||
* The payload key is `projectId` post-migration so frontend listeners can
|
||||
* match notifications against the DB-assigned project identifier they
|
||||
* already use everywhere else.
|
||||
*
|
||||
* @param {WebSocket.Server} wss - WebSocket server instance
|
||||
* @param {string} projectName - Name of the updated project
|
||||
* @param {string} projectId - DB id of the updated project
|
||||
* @param {Object} taskMasterData - Updated TaskMaster data
|
||||
*/
|
||||
export function broadcastTaskMasterProjectUpdate(wss, projectName, taskMasterData) {
|
||||
if (!wss || !projectName) {
|
||||
console.warn('TaskMaster WebSocket broadcast: Missing wss or projectName');
|
||||
export function broadcastTaskMasterProjectUpdate(wss, projectId, taskMasterData) {
|
||||
if (!wss || !projectId) {
|
||||
console.warn('TaskMaster WebSocket broadcast: Missing wss or projectId');
|
||||
return;
|
||||
}
|
||||
|
||||
const message = {
|
||||
type: 'taskmaster-project-updated',
|
||||
projectName,
|
||||
projectId,
|
||||
taskMasterData,
|
||||
timestamp: new Date().toISOString()
|
||||
};
|
||||
@@ -38,20 +43,21 @@ export function broadcastTaskMasterProjectUpdate(wss, projectName, taskMasterDat
|
||||
}
|
||||
|
||||
/**
|
||||
* Broadcast TaskMaster tasks update for a specific project
|
||||
* @param {WebSocket.Server} wss - WebSocket server instance
|
||||
* @param {string} projectName - Name of the project with updated tasks
|
||||
* Broadcast TaskMaster tasks update for a specific project.
|
||||
*
|
||||
* @param {WebSocket.Server} wss - WebSocket server instance
|
||||
* @param {string} projectId - DB id of the project with updated tasks
|
||||
* @param {Object} tasksData - Updated tasks data
|
||||
*/
|
||||
export function broadcastTaskMasterTasksUpdate(wss, projectName, tasksData) {
|
||||
if (!wss || !projectName) {
|
||||
console.warn('TaskMaster WebSocket broadcast: Missing wss or projectName');
|
||||
export function broadcastTaskMasterTasksUpdate(wss, projectId, tasksData) {
|
||||
if (!wss || !projectId) {
|
||||
console.warn('TaskMaster WebSocket broadcast: Missing wss or projectId');
|
||||
return;
|
||||
}
|
||||
|
||||
const message = {
|
||||
type: 'taskmaster-tasks-updated',
|
||||
projectName,
|
||||
projectId,
|
||||
tasksData,
|
||||
timestamp: new Date().toISOString()
|
||||
};
|
||||
|
||||
@@ -13,8 +13,8 @@
|
||||
export const CLAUDE_MODELS = {
|
||||
// Models in SDK format (what the actual SDK accepts)
|
||||
OPTIONS: [
|
||||
{ value: "sonnet", label: "Sonnet" },
|
||||
{ value: "opus", label: "Opus" },
|
||||
{ value: "sonnet", label: "Sonnet" },
|
||||
{ value: "haiku", label: "Haiku" },
|
||||
{ value: "claude-opus-4-6", label: "Opus 4.6" },
|
||||
{ value: "opusplan", label: "Opus Plan" },
|
||||
@@ -59,6 +59,7 @@ export const CURSOR_MODELS = {
|
||||
*/
|
||||
export const CODEX_MODELS = {
|
||||
OPTIONS: [
|
||||
{ value: "gpt-5.5", label: "GPT-5.5" },
|
||||
{ value: "gpt-5.4", label: "GPT-5.4" },
|
||||
{ value: "gpt-5.4-mini", label: "GPT-5.4 mini" },
|
||||
{ value: "gpt-5.3-codex", label: "GPT-5.3 Codex" },
|
||||
|
||||
@@ -135,7 +135,9 @@ export function useChatComposerState({
|
||||
}: UseChatComposerStateArgs) {
|
||||
const [input, setInput] = useState(() => {
|
||||
if (typeof window !== 'undefined' && selectedProject) {
|
||||
return safeLocalStorage.getItem(`draft_input_${selectedProject.name}`) || '';
|
||||
// Draft inputs are keyed by the DB projectId so per-project drafts
|
||||
// survive display-name changes.
|
||||
return safeLocalStorage.getItem(`draft_input_${selectedProject.projectId}`) || '';
|
||||
}
|
||||
return '';
|
||||
});
|
||||
@@ -276,9 +278,11 @@ export function useChatComposerState({
|
||||
const args =
|
||||
commandMatch && commandMatch[1] ? commandMatch[1].trim().split(/\s+/) : [];
|
||||
|
||||
// The `/api/commands/execute` context sends `projectId` now instead of
|
||||
// a folder-derived project name; the path is still included verbatim.
|
||||
const context = {
|
||||
projectPath: selectedProject.fullPath || selectedProject.path,
|
||||
projectName: selectedProject.name,
|
||||
projectId: selectedProject.projectId,
|
||||
sessionId: currentSessionId,
|
||||
provider,
|
||||
model: provider === 'cursor' ? cursorModel : provider === 'codex' ? codexModel : provider === 'gemini' ? geminiModel : claudeModel,
|
||||
@@ -503,7 +507,7 @@ export function useChatComposerState({
|
||||
});
|
||||
|
||||
try {
|
||||
const response = await authenticatedFetch(`/api/projects/${selectedProject.name}/upload-images`, {
|
||||
const response = await authenticatedFetch(`/api/projects/${selectedProject.projectId}/upload-images`, {
|
||||
method: 'POST',
|
||||
headers: {},
|
||||
body: formData,
|
||||
@@ -669,7 +673,7 @@ export function useChatComposerState({
|
||||
textareaRef.current.style.height = 'auto';
|
||||
}
|
||||
|
||||
safeLocalStorage.removeItem(`draft_input_${selectedProject.name}`);
|
||||
safeLocalStorage.removeItem(`draft_input_${selectedProject.projectId}`);
|
||||
},
|
||||
[
|
||||
selectedSession,
|
||||
@@ -712,22 +716,22 @@ export function useChatComposerState({
|
||||
if (!selectedProject) {
|
||||
return;
|
||||
}
|
||||
const savedInput = safeLocalStorage.getItem(`draft_input_${selectedProject.name}`) || '';
|
||||
const savedInput = safeLocalStorage.getItem(`draft_input_${selectedProject.projectId}`) || '';
|
||||
setInput((previous) => {
|
||||
const next = previous === savedInput ? previous : savedInput;
|
||||
inputValueRef.current = next;
|
||||
return next;
|
||||
});
|
||||
}, [selectedProject?.name]);
|
||||
}, [selectedProject?.projectId]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!selectedProject) {
|
||||
return;
|
||||
}
|
||||
if (input !== '') {
|
||||
safeLocalStorage.setItem(`draft_input_${selectedProject.name}`, input);
|
||||
safeLocalStorage.setItem(`draft_input_${selectedProject.projectId}`, input);
|
||||
} else {
|
||||
safeLocalStorage.removeItem(`draft_input_${selectedProject.name}`);
|
||||
safeLocalStorage.removeItem(`draft_input_${selectedProject.projectId}`);
|
||||
}
|
||||
}, [input, selectedProject]);
|
||||
|
||||
|
||||
@@ -241,7 +241,8 @@ export function useChatSessionState({
|
||||
try {
|
||||
const slot = await sessionStore.fetchMore(selectedSession.id, {
|
||||
provider: sessionProvider as LLMProvider,
|
||||
projectName: selectedProject.name,
|
||||
// DB-assigned projectId replaces the legacy folder-derived name.
|
||||
projectId: selectedProject.projectId,
|
||||
projectPath: selectedProject.fullPath || selectedProject.path || '',
|
||||
limit: MESSAGES_PER_PAGE,
|
||||
});
|
||||
@@ -296,7 +297,7 @@ export function useChatSessionState({
|
||||
topLoadLockRef.current = false;
|
||||
pendingScrollRestoreRef.current = null;
|
||||
setIsUserScrolledUp(false);
|
||||
}, [selectedProject?.name, selectedSession?.id]);
|
||||
}, [selectedProject?.projectId, selectedSession?.id]);
|
||||
|
||||
// Initial scroll to bottom
|
||||
useEffect(() => {
|
||||
@@ -325,7 +326,7 @@ export function useChatSessionState({
|
||||
}
|
||||
|
||||
const provider = (selectedSession.__provider || localStorage.getItem('selected-provider') as Provider) || 'claude';
|
||||
const sessionKey = `${selectedSession.id}:${selectedProject.name}:${provider}`;
|
||||
const sessionKey = `${selectedSession.id}:${selectedProject.projectId}:${provider}`;
|
||||
|
||||
// Skip if already loaded and fresh
|
||||
if (lastLoadedSessionKeyRef.current === sessionKey && sessionStore.has(selectedSession.id) && !sessionStore.isStale(selectedSession.id)) {
|
||||
@@ -375,7 +376,7 @@ export function useChatSessionState({
|
||||
setIsLoadingSessionMessages(true);
|
||||
sessionStore.fetchFromServer(selectedSession.id, {
|
||||
provider: (selectedSession.__provider || provider) as LLMProvider,
|
||||
projectName: selectedProject.name,
|
||||
projectId: selectedProject.projectId,
|
||||
projectPath: selectedProject.fullPath || selectedProject.path || '',
|
||||
limit: MESSAGES_PER_PAGE,
|
||||
offset: 0,
|
||||
@@ -411,7 +412,7 @@ export function useChatSessionState({
|
||||
if (!isLoading) {
|
||||
await sessionStore.refreshFromServer(selectedSession.id, {
|
||||
provider: (selectedSession.__provider || provider) as LLMProvider,
|
||||
projectName: selectedProject.name,
|
||||
projectId: selectedProject.projectId,
|
||||
projectPath: selectedProject.fullPath || selectedProject.path || '',
|
||||
});
|
||||
|
||||
@@ -469,7 +470,7 @@ export function useChatSessionState({
|
||||
// Load all messages into the store for search navigation
|
||||
const slot = await sessionStore.fetchFromServer(selectedSession.id, {
|
||||
provider: sessionProvider as LLMProvider,
|
||||
projectName: selectedProject.name,
|
||||
projectId: selectedProject.projectId,
|
||||
projectPath: selectedProject.fullPath || selectedProject.path || '',
|
||||
limit: null,
|
||||
offset: 0,
|
||||
@@ -550,7 +551,8 @@ export function useChatSessionState({
|
||||
|
||||
const fetchInitialTokenUsage = async () => {
|
||||
try {
|
||||
const url = `/api/projects/${selectedProject.name}/sessions/${selectedSession.id}/token-usage`;
|
||||
// Token usage endpoint is now keyed by the DB projectId.
|
||||
const url = `/api/projects/${selectedProject.projectId}/sessions/${selectedSession.id}/token-usage`;
|
||||
const response = await authenticatedFetch(url);
|
||||
if (response.ok) {
|
||||
setTokenBudget(await response.json());
|
||||
@@ -656,7 +658,7 @@ export function useChatSessionState({
|
||||
try {
|
||||
const slot = await sessionStore.fetchFromServer(requestSessionId, {
|
||||
provider: sessionProvider as LLMProvider,
|
||||
projectName: selectedProject.name,
|
||||
projectId: selectedProject.projectId,
|
||||
projectPath: selectedProject.fullPath || selectedProject.path || '',
|
||||
limit: null,
|
||||
offset: 0,
|
||||
|
||||
@@ -59,16 +59,18 @@ export function useFileMentions({ selectedProject, input, setInput, textareaRef
|
||||
const abortController = new AbortController();
|
||||
|
||||
const fetchProjectFiles = async () => {
|
||||
const projectName = selectedProject?.name;
|
||||
// File list is keyed by DB projectId now; the backend resolves it to
|
||||
// the project's path before reading.
|
||||
const projectId = selectedProject?.projectId;
|
||||
setFileList([]);
|
||||
setFilteredFiles([]);
|
||||
if (!projectName) {
|
||||
if (!projectId) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
try {
|
||||
const response = await api.getFiles(projectName, { signal: abortController.signal });
|
||||
const response = await api.getFiles(projectId, { signal: abortController.signal });
|
||||
if (!response.ok) {
|
||||
return;
|
||||
}
|
||||
@@ -88,7 +90,7 @@ export function useFileMentions({ selectedProject, input, setInput, textareaRef
|
||||
return () => {
|
||||
abortController.abort();
|
||||
};
|
||||
}, [selectedProject?.name]);
|
||||
}, [selectedProject?.projectId]);
|
||||
|
||||
useEffect(() => {
|
||||
const textBeforeCursor = input.slice(0, cursorPosition);
|
||||
|
||||
@@ -114,7 +114,7 @@ export function useSlashCommands({
|
||||
})),
|
||||
];
|
||||
|
||||
const parsedHistory = readCommandHistory(selectedProject.name);
|
||||
const parsedHistory = readCommandHistory(selectedProject.projectId);
|
||||
const sortedCommands = [...allCommands].sort((commandA, commandB) => {
|
||||
const commandAUsage = parsedHistory[commandA.name] || 0;
|
||||
const commandBUsage = parsedHistory[commandB.name] || 0;
|
||||
@@ -173,7 +173,7 @@ export function useSlashCommands({
|
||||
return [];
|
||||
}
|
||||
|
||||
const parsedHistory = readCommandHistory(selectedProject.name);
|
||||
const parsedHistory = readCommandHistory(selectedProject.projectId);
|
||||
|
||||
return slashCommands
|
||||
.map((command) => ({
|
||||
@@ -191,9 +191,9 @@ export function useSlashCommands({
|
||||
return;
|
||||
}
|
||||
|
||||
const parsedHistory = readCommandHistory(selectedProject.name);
|
||||
const parsedHistory = readCommandHistory(selectedProject.projectId);
|
||||
parsedHistory[command.name] = (parsedHistory[command.name] || 0) + 1;
|
||||
saveCommandHistory(selectedProject.name, parsedHistory);
|
||||
saveCommandHistory(selectedProject.projectId, parsedHistory);
|
||||
},
|
||||
[selectedProject],
|
||||
);
|
||||
|
||||
@@ -212,7 +212,8 @@ function ChatInterface({
|
||||
const providerVal = (localStorage.getItem('selected-provider') as LLMProvider) || 'claude';
|
||||
await sessionStore.refreshFromServer(selectedSession.id, {
|
||||
provider: (selectedSession.__provider || providerVal) as LLMProvider,
|
||||
projectName: selectedProject.name,
|
||||
// Use DB projectId; legacy folder-derived projectName is no longer accepted here.
|
||||
projectId: selectedProject.projectId,
|
||||
projectPath: selectedProject.fullPath || selectedProject.path || '',
|
||||
});
|
||||
setIsLoading(false);
|
||||
|
||||
@@ -23,7 +23,10 @@ export const useCodeEditorDocument = ({ file, projectPath }: UseCodeEditorDocume
|
||||
const [saveSuccess, setSaveSuccess] = useState(false);
|
||||
const [saveError, setSaveError] = useState<string | null>(null);
|
||||
const [isBinary, setIsBinary] = useState(false);
|
||||
const fileProjectName = file.projectName ?? projectPath;
|
||||
// `fileProjectId` is the DB primary key passed down from the editor sidebar;
|
||||
// the fallback to `projectPath` preserves older callers that didn't yet
|
||||
// propagate the identifier.
|
||||
const fileProjectId = file.projectId ?? projectPath;
|
||||
const filePath = file.path;
|
||||
const fileName = file.name;
|
||||
const fileDiffNewString = file.diffInfo?.new_string;
|
||||
@@ -49,11 +52,11 @@ export const useCodeEditorDocument = ({ file, projectPath }: UseCodeEditorDocume
|
||||
return;
|
||||
}
|
||||
|
||||
if (!fileProjectName) {
|
||||
if (!fileProjectId) {
|
||||
throw new Error('Missing project identifier');
|
||||
}
|
||||
|
||||
const response = await api.readFile(fileProjectName, filePath);
|
||||
const response = await api.readFile(fileProjectId, filePath);
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to load file: ${response.status} ${response.statusText}`);
|
||||
}
|
||||
@@ -70,18 +73,18 @@ export const useCodeEditorDocument = ({ file, projectPath }: UseCodeEditorDocume
|
||||
};
|
||||
|
||||
loadFileContent();
|
||||
}, [file.diffInfo, file.name, fileDiffNewString, fileDiffOldString, fileName, filePath, fileProjectName]);
|
||||
}, [file.diffInfo, file.name, fileDiffNewString, fileDiffOldString, fileName, filePath, fileProjectId]);
|
||||
|
||||
const handleSave = useCallback(async () => {
|
||||
setSaving(true);
|
||||
setSaveError(null);
|
||||
|
||||
try {
|
||||
if (!fileProjectName) {
|
||||
if (!fileProjectId) {
|
||||
throw new Error('Missing project identifier');
|
||||
}
|
||||
|
||||
const response = await api.saveFile(fileProjectName, filePath, content);
|
||||
const response = await api.saveFile(fileProjectId, filePath, content);
|
||||
|
||||
if (!response.ok) {
|
||||
const contentType = response.headers.get('content-type');
|
||||
@@ -106,7 +109,7 @@ export const useCodeEditorDocument = ({ file, projectPath }: UseCodeEditorDocume
|
||||
} finally {
|
||||
setSaving(false);
|
||||
}
|
||||
}, [content, filePath, fileProjectName]);
|
||||
}, [content, filePath, fileProjectId]);
|
||||
|
||||
const handleDownload = useCallback(() => {
|
||||
const blob = new Blob([content], { type: 'text/plain' });
|
||||
|
||||
@@ -29,11 +29,13 @@ export const useEditorSidebar = ({
|
||||
setEditingFile({
|
||||
name: fileName,
|
||||
path: filePath,
|
||||
projectName: selectedProject?.name,
|
||||
// DB projectId is forwarded to the editor so it can read/save files
|
||||
// via `/api/projects/:projectId/file` endpoints.
|
||||
projectId: selectedProject?.projectId,
|
||||
diffInfo,
|
||||
});
|
||||
},
|
||||
[selectedProject?.name],
|
||||
[selectedProject?.projectId],
|
||||
);
|
||||
|
||||
const handleCloseEditor = useCallback(() => {
|
||||
|
||||
@@ -7,7 +7,9 @@ export type CodeEditorDiffInfo = {
|
||||
export type CodeEditorFile = {
|
||||
name: string;
|
||||
path: string;
|
||||
projectName?: string;
|
||||
// DB projectId; used by the editor to build `/api/projects/:projectId/file`
|
||||
// URLs for reading and saving content.
|
||||
projectId?: string;
|
||||
diffInfo?: CodeEditorDiffInfo | null;
|
||||
[key: string]: unknown;
|
||||
};
|
||||
|
||||
@@ -20,9 +20,11 @@ export function useFileTreeData(selectedProject: Project | null): UseFileTreeDat
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
const projectName = selectedProject?.name;
|
||||
// File-tree requests use the DB projectId; the backend resolves it to the
|
||||
// project's absolute path through the projects table.
|
||||
const projectId = selectedProject?.projectId;
|
||||
|
||||
if (!projectName) {
|
||||
if (!projectId) {
|
||||
setFiles([]);
|
||||
setLoading(false);
|
||||
return;
|
||||
@@ -42,7 +44,7 @@ export function useFileTreeData(selectedProject: Project | null): UseFileTreeDat
|
||||
setLoading(true);
|
||||
}
|
||||
try {
|
||||
const response = await api.getFiles(projectName, { signal: abortControllerRef.current!.signal });
|
||||
const response = await api.getFiles(projectId, { signal: abortControllerRef.current!.signal });
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text();
|
||||
@@ -79,7 +81,7 @@ export function useFileTreeData(selectedProject: Project | null): UseFileTreeDat
|
||||
isActive = false;
|
||||
abortControllerRef.current?.abort();
|
||||
};
|
||||
}, [selectedProject?.name, refreshKey]);
|
||||
}, [selectedProject?.projectId, refreshKey]);
|
||||
|
||||
return {
|
||||
files,
|
||||
|
||||
@@ -126,7 +126,7 @@ export function useFileTreeOperations({
|
||||
|
||||
setOperationLoading(true);
|
||||
try {
|
||||
const response = await api.renameFile(selectedProject.name, {
|
||||
const response = await api.renameFile(selectedProject.projectId, {
|
||||
oldPath: renamingItem.path,
|
||||
newName: renameValue,
|
||||
});
|
||||
@@ -161,7 +161,7 @@ export function useFileTreeOperations({
|
||||
|
||||
setOperationLoading(true);
|
||||
try {
|
||||
const response = await api.deleteFile(selectedProject.name, {
|
||||
const response = await api.deleteFile(selectedProject.projectId, {
|
||||
path: item.path,
|
||||
type: item.type,
|
||||
});
|
||||
@@ -212,7 +212,7 @@ export function useFileTreeOperations({
|
||||
|
||||
setOperationLoading(true);
|
||||
try {
|
||||
const response = await api.createFile(selectedProject.name, {
|
||||
const response = await api.createFile(selectedProject.projectId, {
|
||||
path: newItemParent,
|
||||
type: newItemType,
|
||||
name: newItemName,
|
||||
@@ -287,7 +287,7 @@ export function useFileTreeOperations({
|
||||
if (!selectedProject) return;
|
||||
|
||||
// Use the binary streaming endpoint so downloads preserve raw bytes.
|
||||
const response = await api.readFileBlob(selectedProject.name, item.path);
|
||||
const response = await api.readFileBlob(selectedProject.projectId, item.path);
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error('Failed to download file');
|
||||
@@ -308,7 +308,7 @@ export function useFileTreeOperations({
|
||||
const fullPath = currentPath ? `${currentPath}/${node.name}` : node.name;
|
||||
|
||||
if (node.type === 'file') {
|
||||
const response = await api.readFileBlob(selectedProject.name, node.path);
|
||||
const response = await api.readFileBlob(selectedProject.projectId, node.path);
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to download "${node.name}" for ZIP export`);
|
||||
}
|
||||
|
||||
@@ -154,7 +154,8 @@ export const useFileTreeUpload = ({
|
||||
formData.append('relativePaths', JSON.stringify(relativePaths));
|
||||
|
||||
const response = await api.post(
|
||||
`/projects/${encodeURIComponent(selectedProject!.name)}/files/upload`,
|
||||
// File upload endpoint is keyed by DB projectId post-migration.
|
||||
`/projects/${encodeURIComponent(selectedProject!.projectId)}/files/upload`,
|
||||
formData
|
||||
);
|
||||
|
||||
|
||||
@@ -19,7 +19,8 @@ export interface FileTreeImageSelection {
|
||||
name: string;
|
||||
path: string;
|
||||
projectPath?: string;
|
||||
projectName: string;
|
||||
// DB projectId; used by ImageViewer to build the raw content URL.
|
||||
projectId: string;
|
||||
}
|
||||
|
||||
export interface FileIconData {
|
||||
|
||||
@@ -101,7 +101,9 @@ export default function FileTree({ selectedProject, onFileOpen }: FileTreeProps)
|
||||
name: item.name,
|
||||
path: item.path,
|
||||
projectPath: selectedProject.path,
|
||||
projectName: selectedProject.name,
|
||||
// Image URL uses the DB projectId so ImageViewer can hit the
|
||||
// /api/projects/:projectId/files/content endpoint directly.
|
||||
projectId: selectedProject.projectId,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -10,7 +10,7 @@ type ImageViewerProps = {
|
||||
};
|
||||
|
||||
export default function ImageViewer({ file, onClose }: ImageViewerProps) {
|
||||
const imagePath = `/api/projects/${file.projectName}/files/content?path=${encodeURIComponent(file.path)}`;
|
||||
const imagePath = `/api/projects/${file.projectId}/files/content?path=${encodeURIComponent(file.path)}`;
|
||||
const [imageUrl, setImageUrl] = useState<string | null>(null);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [loading, setLoading] = useState(true);
|
||||
|
||||
@@ -64,10 +64,12 @@ export function useGitPanelController({
|
||||
const [operationError, setOperationError] = useState<string | null>(null);
|
||||
|
||||
const clearOperationError = useCallback(() => setOperationError(null), []);
|
||||
const selectedProjectNameRef = useRef<string | null>(selectedProject?.name ?? null);
|
||||
// Tracks the DB projectId so async requests can detect stale responses when
|
||||
// the user switches projects mid-flight.
|
||||
const selectedProjectIdRef = useRef<string | null>(selectedProject?.projectId ?? null);
|
||||
|
||||
useEffect(() => {
|
||||
selectedProjectNameRef.current = selectedProject?.name ?? null;
|
||||
selectedProjectIdRef.current = selectedProject?.projectId ?? null;
|
||||
}, [selectedProject]);
|
||||
|
||||
const provider = useSelectedProvider();
|
||||
@@ -78,18 +80,19 @@ export function useGitPanelController({
|
||||
return;
|
||||
}
|
||||
|
||||
const projectName = selectedProject.name;
|
||||
// Git endpoints receive the DB projectId via the `project` query param.
|
||||
const projectId = selectedProject.projectId;
|
||||
|
||||
try {
|
||||
const response = await fetchWithAuth(
|
||||
`/api/git/diff?project=${encodeURIComponent(projectName)}&file=${encodeURIComponent(filePath)}`,
|
||||
`/api/git/diff?project=${encodeURIComponent(projectId)}&file=${encodeURIComponent(filePath)}`,
|
||||
{ signal },
|
||||
);
|
||||
const data = await readJson<GitDiffResponse>(response, signal);
|
||||
|
||||
if (
|
||||
signal?.aborted ||
|
||||
selectedProjectNameRef.current !== projectName
|
||||
selectedProjectIdRef.current !== projectId
|
||||
) {
|
||||
return;
|
||||
}
|
||||
@@ -116,16 +119,17 @@ export function useGitPanelController({
|
||||
return;
|
||||
}
|
||||
|
||||
const projectName = selectedProject.name;
|
||||
// `project` query param carries the DB projectId everywhere now.
|
||||
const projectId = selectedProject.projectId;
|
||||
|
||||
setIsLoading(true);
|
||||
try {
|
||||
const response = await fetchWithAuth(`/api/git/status?project=${encodeURIComponent(projectName)}`, { signal });
|
||||
const response = await fetchWithAuth(`/api/git/status?project=${encodeURIComponent(projectId)}`, { signal });
|
||||
const data = await readJson<GitStatusResponse>(response, signal);
|
||||
|
||||
if (
|
||||
signal?.aborted ||
|
||||
selectedProjectNameRef.current !== projectName
|
||||
selectedProjectIdRef.current !== projectId
|
||||
) {
|
||||
return;
|
||||
}
|
||||
@@ -150,7 +154,7 @@ export function useGitPanelController({
|
||||
}
|
||||
|
||||
if (
|
||||
selectedProjectNameRef.current !== projectName
|
||||
selectedProjectIdRef.current !== projectId
|
||||
) {
|
||||
return;
|
||||
}
|
||||
@@ -169,7 +173,7 @@ export function useGitPanelController({
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await fetchWithAuth(`/api/git/branches?project=${encodeURIComponent(selectedProject.name)}`);
|
||||
const response = await fetchWithAuth(`/api/git/branches?project=${encodeURIComponent(selectedProject.projectId)}`);
|
||||
const data = await readJson<GitBranchesResponse>(response);
|
||||
|
||||
if (!data.error && data.branches) {
|
||||
@@ -196,7 +200,7 @@ export function useGitPanelController({
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await fetchWithAuth(`/api/git/remote-status?project=${encodeURIComponent(selectedProject.name)}`);
|
||||
const response = await fetchWithAuth(`/api/git/remote-status?project=${encodeURIComponent(selectedProject.projectId)}`);
|
||||
const data = await readJson<GitRemoteStatus | GitApiErrorResponse>(response);
|
||||
|
||||
if (!data.error) {
|
||||
@@ -222,7 +226,7 @@ export function useGitPanelController({
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
project: selectedProject.name,
|
||||
project: selectedProject.projectId,
|
||||
branch: branchName,
|
||||
}),
|
||||
});
|
||||
@@ -257,7 +261,7 @@ export function useGitPanelController({
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
project: selectedProject.name,
|
||||
project: selectedProject.projectId,
|
||||
branch: trimmedBranchName,
|
||||
}),
|
||||
});
|
||||
@@ -290,7 +294,7 @@ export function useGitPanelController({
|
||||
const response = await fetchWithAuth('/api/git/delete-branch', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ project: selectedProject.name, branch: branchName }),
|
||||
body: JSON.stringify({ project: selectedProject.projectId, branch: branchName }),
|
||||
});
|
||||
|
||||
const data = await readJson<GitOperationResponse>(response);
|
||||
@@ -320,7 +324,7 @@ export function useGitPanelController({
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
project: selectedProject.name,
|
||||
project: selectedProject.projectId,
|
||||
}),
|
||||
});
|
||||
|
||||
@@ -351,7 +355,7 @@ export function useGitPanelController({
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
project: selectedProject.name,
|
||||
project: selectedProject.projectId,
|
||||
}),
|
||||
});
|
||||
|
||||
@@ -381,7 +385,7 @@ export function useGitPanelController({
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
project: selectedProject.name,
|
||||
project: selectedProject.projectId,
|
||||
}),
|
||||
});
|
||||
|
||||
@@ -411,7 +415,7 @@ export function useGitPanelController({
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
project: selectedProject.name,
|
||||
project: selectedProject.projectId,
|
||||
branch: currentBranch,
|
||||
}),
|
||||
});
|
||||
@@ -442,7 +446,7 @@ export function useGitPanelController({
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
project: selectedProject.name,
|
||||
project: selectedProject.projectId,
|
||||
file: filePath,
|
||||
}),
|
||||
});
|
||||
@@ -472,7 +476,7 @@ export function useGitPanelController({
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
project: selectedProject.name,
|
||||
project: selectedProject.projectId,
|
||||
file: filePath,
|
||||
}),
|
||||
});
|
||||
@@ -498,7 +502,7 @@ export function useGitPanelController({
|
||||
|
||||
try {
|
||||
const response = await fetchWithAuth(
|
||||
`/api/git/commits?project=${encodeURIComponent(selectedProject.name)}&limit=${RECENT_COMMITS_LIMIT}`,
|
||||
`/api/git/commits?project=${encodeURIComponent(selectedProject.projectId)}&limit=${RECENT_COMMITS_LIMIT}`,
|
||||
);
|
||||
const data = await readJson<GitCommitsResponse>(response);
|
||||
|
||||
@@ -518,7 +522,7 @@ export function useGitPanelController({
|
||||
|
||||
try {
|
||||
const response = await fetchWithAuth(
|
||||
`/api/git/commit-diff?project=${encodeURIComponent(selectedProject.name)}&commit=${commitHash}`,
|
||||
`/api/git/commit-diff?project=${encodeURIComponent(selectedProject.projectId)}&commit=${commitHash}`,
|
||||
);
|
||||
const data = await readJson<GitDiffResponse>(response);
|
||||
|
||||
@@ -546,7 +550,7 @@ export function useGitPanelController({
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
project: selectedProject.name,
|
||||
project: selectedProject.projectId,
|
||||
files,
|
||||
provider,
|
||||
}),
|
||||
@@ -578,7 +582,7 @@ export function useGitPanelController({
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
project: selectedProject.name,
|
||||
project: selectedProject.projectId,
|
||||
message,
|
||||
files,
|
||||
}),
|
||||
@@ -612,7 +616,7 @@ export function useGitPanelController({
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
project: selectedProject.name,
|
||||
project: selectedProject.projectId,
|
||||
}),
|
||||
});
|
||||
|
||||
@@ -645,7 +649,7 @@ export function useGitPanelController({
|
||||
|
||||
try {
|
||||
const response = await fetchWithAuth(
|
||||
`/api/git/file-with-diff?project=${encodeURIComponent(selectedProject.name)}&file=${encodeURIComponent(filePath)}`,
|
||||
`/api/git/file-with-diff?project=${encodeURIComponent(selectedProject.projectId)}&file=${encodeURIComponent(filePath)}`,
|
||||
);
|
||||
const data = await readJson<GitFileWithDiffResponse>(response);
|
||||
|
||||
|
||||
@@ -3,7 +3,9 @@ import { authenticatedFetch } from '../../../utils/api';
|
||||
import type { GitOperationResponse } from '../types/types';
|
||||
|
||||
type UseRevertLocalCommitOptions = {
|
||||
projectName: string | null;
|
||||
// DB primary key for the project; forwarded to the git API via the
|
||||
// `project` body param.
|
||||
projectId: string | null;
|
||||
onSuccess?: () => void;
|
||||
};
|
||||
|
||||
@@ -11,11 +13,11 @@ async function readJson<T>(response: Response): Promise<T> {
|
||||
return (await response.json()) as T;
|
||||
}
|
||||
|
||||
export function useRevertLocalCommit({ projectName, onSuccess }: UseRevertLocalCommitOptions) {
|
||||
export function useRevertLocalCommit({ projectId, onSuccess }: UseRevertLocalCommitOptions) {
|
||||
const [isRevertingLocalCommit, setIsRevertingLocalCommit] = useState(false);
|
||||
|
||||
const revertLatestLocalCommit = useCallback(async () => {
|
||||
if (!projectName) {
|
||||
if (!projectId) {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -24,7 +26,7 @@ export function useRevertLocalCommit({ projectName, onSuccess }: UseRevertLocalC
|
||||
const response = await authenticatedFetch('/api/git/revert-local-commit', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ project: projectName }),
|
||||
body: JSON.stringify({ project: projectId }),
|
||||
});
|
||||
const data = await readJson<GitOperationResponse>(response);
|
||||
|
||||
@@ -39,7 +41,7 @@ export function useRevertLocalCommit({ projectName, onSuccess }: UseRevertLocalC
|
||||
} finally {
|
||||
setIsRevertingLocalCommit(false);
|
||||
}
|
||||
}, [onSuccess, projectName]);
|
||||
}, [onSuccess, projectId]);
|
||||
|
||||
return {
|
||||
isRevertingLocalCommit,
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user