feat: setup unified classes for LLM providers and session processing, add tests for LLM unifier helper functions

This commit is contained in:
Haileyesus
2026-04-06 17:37:58 +03:00
parent 753c58fc1a
commit 6d00c17137
50 changed files with 4689 additions and 1721 deletions

View File

@@ -14,19 +14,19 @@ export function createServerApplication(): ServerApplication {
start: async () => {
// ----------------------------------------------
// Legacy backend Runner
logger.info('Bootstrapping backend via legacy runtime bridge', {
legacyRuntime: runtimePaths.legacyRuntimePath,
});
await import(pathToFileURL(runtimePaths.legacyRuntimePath).href);
// logger.info('Bootstrapping backend via legacy runtime bridge', {
// legacyRuntime: runtimePaths.legacyRuntimePath,
// });
// await import(pathToFileURL(runtimePaths.legacyRuntimePath).href);
// ----------------------------------------------
// ----------------------------------------------
// Refactor backend Runner
// logger.info('Bootstrapping backend via refactor runtime', {
// refactorRuntime: runtimePaths.refactorRuntimePath,
// });
// await import(pathToFileURL(runtimePaths.refactorRuntimePath).href);
logger.info('Bootstrapping backend via refactor runtime', {
refactorRuntime: runtimePaths.refactorRuntimePath,
});
await import(pathToFileURL(runtimePaths.refactorRuntimePath).href);
},
};
}

View File

@@ -6,7 +6,7 @@ import os from 'os';
import TOML from '@iarna/toml';
import { getCodexSessions } from '../../../projects.js';
import { sessionsDb } from '@/shared/database/repositories/sessions.db.js';
import { deleteSession as deleteSessionFromProviders } from '@/modules/sessions/sessions.service.js';
import { llmSessionsService } from '@/modules/llm/sessions.service.js';
const router = express.Router();
@@ -72,7 +72,7 @@ router.get('/sessions', async (req, res) => {
router.delete('/sessions/:sessionId', async (req, res) => {
try {
const { sessionId } = req.params;
await deleteSessionFromProviders(sessionId);
await llmSessionsService.deleteSessionArtifacts(sessionId);
res.json({ success: true });
} catch (error) {
console.error(`Error deleting Codex session ${req.params.sessionId}:`, error);

View File

@@ -0,0 +1,233 @@
import path from 'node:path';
import { readFile } from 'node:fs/promises';
import { once } from 'node:events';
import spawn from 'cross-spawn';
import { sessionsDb } from '@/shared/database/repositories/sessions.db.js';
import { AppError } from '@/shared/utils/app-error.js';
type SearchResult = {
sessionId: string;
provider: string;
filePath: string;
lineNumber: number;
lineText: string;
};
type SearchInput = {
query: string;
provider?: string;
caseSensitive?: boolean;
limit?: number;
};
/**
 * Canonicalizes a transcript path so DB-stored paths and ripgrep-reported
 * paths compare equal on every platform (Windows paths are case-insensitive).
 */
const normalizePathForLookup = (filePath: string): string => {
  const normalized = path.normalize(filePath);
  return process.platform === 'win32' ? normalized.toLowerCase() : normalized;
};
/**
 * Searches all indexed session transcript files for a text query.
 */
export const conversationSearchService = {
  /**
   * Uses ripgrep first for speed, then falls back to direct file scanning.
   */
  async search(input: SearchInput): Promise<SearchResult[]> {
    const query = input.query.trim();
    if (query.length === 0) {
      throw new AppError('query is required.', {
        code: 'SEARCH_QUERY_REQUIRED',
        statusCode: 400,
      });
    }

    // Clamp the caller-supplied limit into [1, 500]; default to 50.
    const limit = Math.min(Math.max(input.limit ?? 50, 1), 500);
    const caseSensitive = input.caseSensitive ?? false;

    // Only sessions with an indexed transcript path are searchable; an
    // optional provider filter narrows the candidate set further.
    const candidateSessions = sessionsDb.getAllSessions().filter((session) => {
      if (!session.jsonl_path) {
        return false;
      }
      return input.provider ? session.provider === input.provider : true;
    });
    if (candidateSessions.length === 0) {
      return [];
    }

    // Key sessions by their normalized transcript path so ripgrep hits can
    // be mapped back to session metadata.
    const sessionByFile = new Map(
      candidateSessions
        .filter((session): session is typeof session & { jsonl_path: string } => Boolean(session.jsonl_path))
        .map((session) => [normalizePathForLookup(session.jsonl_path), session]),
    );
    const uniqueDirectories = [
      ...new Set(candidateSessions.map((session) => path.dirname(session.jsonl_path as string))),
    ];

    const rgMatches = await runRipgrepSearch(query, uniqueDirectories, { caseSensitive, limit });
    if (rgMatches.length === 0) {
      // ripgrep missing or produced nothing structured — scan files directly.
      return fallbackFileSearch(query, sessionByFile, { caseSensitive, limit });
    }

    const mapped: SearchResult[] = [];
    for (const match of rgMatches) {
      const session = sessionByFile.get(normalizePathForLookup(match.filePath));
      if (!session) {
        // Hit in a file we do not index (same directory, different session).
        continue;
      }
      mapped.push({
        sessionId: session.session_id,
        provider: session.provider,
        filePath: match.filePath,
        lineNumber: match.lineNumber,
        lineText: match.lineText,
      });
      if (mapped.length >= limit) {
        break;
      }
    }
    return mapped;
  },
};
/**
 * Runs ripgrep in JSON mode and maps each match to a minimal search shape.
 *
 * Failures are deliberately non-fatal: any spawn error or unexpected exit
 * code yields an empty result so the caller can fall back to direct scanning.
 */
async function runRipgrepSearch(
  query: string,
  directories: string[],
  options: {
    caseSensitive: boolean;
    limit: number;
  },
): Promise<Array<{ filePath: string; lineNumber: number; lineText: string }>> {
  // Without explicit paths rg would scan the CWD and return unrelated matches.
  if (directories.length === 0) {
    return [];
  }

  const args = ['--json', '--line-number', '--no-heading'];
  if (!options.caseSensitive) {
    args.push('-i');
  }
  // NOTE: --max-count caps matches *per file*; the global cap is enforced below.
  args.push('--max-count', String(options.limit), '--', query, ...directories);

  // stderr is ignored: piping it without draining can stall rg once the pipe
  // buffer fills, and its content is never consumed here.
  const child = spawn('rg', args, {
    stdio: ['ignore', 'pipe', 'ignore'],
    cwd: process.cwd(),
    env: process.env,
  });

  let stdout = '';
  child.stdout?.on('data', (chunk) => {
    stdout += chunk.toString();
  });

  // Wait for either outcome with a single promise instead of racing two
  // `once()` promises: the losing promise of a race stays pending, and a late
  // 'error' event would then surface as an unhandled rejection.
  const spawnFailed = await new Promise<boolean>((resolve) => {
    child.once('error', () => resolve(true));
    child.once('close', () => resolve(false));
  });
  if (spawnFailed) {
    return []; // rg binary missing or not executable
  }

  // rg exits 0 on matches and 1 on "no matches"; anything else is a failure.
  if (child.exitCode !== 0 && child.exitCode !== 1) {
    return [];
  }

  const matches: Array<{ filePath: string; lineNumber: number; lineText: string }> = [];
  for (const line of stdout.split(/\r?\n/)) {
    if (!line.trim()) {
      continue;
    }
    let parsed: any;
    try {
      parsed = JSON.parse(line);
    } catch {
      continue; // skip partial or non-JSON output lines
    }
    // rg --json emits begin/match/end/summary records; only 'match' carries hits.
    if (parsed?.type !== 'match') {
      continue;
    }
    const filePath = parsed?.data?.path?.text;
    const lineNumber = parsed?.data?.line_number;
    const lineText = parsed?.data?.lines?.text;
    if (
      typeof filePath !== 'string' ||
      typeof lineNumber !== 'number' ||
      typeof lineText !== 'string'
    ) {
      continue;
    }
    matches.push({
      filePath,
      lineNumber,
      lineText: lineText.trimEnd(),
    });
    if (matches.length >= options.limit) {
      break;
    }
  }
  return matches;
}
/**
 * Fallback search path when ripgrep is unavailable or returns no structured matches.
 *
 * Reads each indexed transcript directly and scans it line by line. Files
 * that have been deleted or are unreadable (stale index entries) are skipped
 * rather than aborting the whole search.
 */
async function fallbackFileSearch(
  query: string,
  sessionByFile: Map<string, { session_id: string; provider: string; jsonl_path: string | null }>,
  options: {
    caseSensitive: boolean;
    limit: number;
  },
): Promise<SearchResult[]> {
  const results: SearchResult[] = [];
  // Lowercase the needle once; each haystack line is lowered inside the loop.
  const queryForMatch = options.caseSensitive ? query : query.toLowerCase();
  for (const [, session] of sessionByFile) {
    if (!session.jsonl_path) {
      continue;
    }
    let content: string;
    try {
      content = await readFile(session.jsonl_path, 'utf8');
    } catch {
      // Stale index entry (file moved/deleted) must not fail the search.
      continue;
    }
    const lines = content.split(/\r?\n/);
    for (let index = 0; index < lines.length; index += 1) {
      const line = lines[index];
      const source = options.caseSensitive ? line : line.toLowerCase();
      if (!source.includes(queryForMatch)) {
        continue;
      }
      results.push({
        sessionId: session.session_id,
        provider: session.provider,
        filePath: session.jsonl_path,
        lineNumber: index + 1, // 1-based to match ripgrep line numbers
        lineText: line,
      });
      if (results.length >= options.limit) {
        return results;
      }
    }
  }
  return results;
}

View File

@@ -0,0 +1,65 @@
import express, { type NextFunction, type Request, type Response } from 'express';
import { asyncHandler } from '@/shared/http/async-handler.js';
import { AppError } from '@/shared/utils/app-error.js';
import { createApiErrorResponse, createApiSuccessResponse } from '@/shared/http/api-response.js';
import { logger } from '@/shared/utils/logger.js';
import { conversationSearchService } from '@/modules/conversations/conversation-search.service.js';
const router = express.Router();
/**
 * GET /search — full-text search across indexed conversation transcripts.
 * Query params: query (required), provider, caseSensitive ('true'), limit.
 */
router.get(
  '/search',
  asyncHandler(async (req: Request, res: Response) => {
    const rawQuery = req.query.query;
    const rawProvider = req.query.provider;
    const rawLimit = req.query.limit;

    const query = typeof rawQuery === 'string' ? rawQuery : '';

    // Provider ids are canonically lowercase.
    let provider: string | undefined;
    if (typeof rawProvider === 'string') {
      provider = rawProvider.trim().toLowerCase();
    }

    // Only a numeric string is accepted; anything else leaves limit unset.
    let limit: number | undefined;
    if (typeof rawLimit === 'string') {
      const parsedLimit = Number.parseInt(rawLimit, 10);
      if (Number.isFinite(parsedLimit)) {
        limit = parsedLimit;
      }
    }

    const results = await conversationSearchService.search({
      query,
      provider,
      caseSensitive: req.query.caseSensitive === 'true',
      limit,
    });

    res.json(
      createApiSuccessResponse({
        query,
        provider: provider ?? null,
        count: results.length,
        results,
      }),
    );
  }),
);
/**
 * Normalizes route-level failures to a consistent JSON API shape.
 *
 * AppError instances keep their status/code; anything else is logged and
 * mapped to a generic 500.
 */
router.use((error: unknown, _req: Request, res: Response, next: NextFunction) => {
  if (res.headersSent) {
    // Per Express error-handling guidance: once headers are out, delegate to
    // the default handler so it can close the connection, instead of
    // silently dropping the error and leaving the response dangling.
    next(error);
    return;
  }
  if (error instanceof AppError) {
    res
      .status(error.statusCode)
      .json(createApiErrorResponse(error.code, error.message, undefined, error.details));
    return;
  }
  const message =
    error instanceof Error ? error.message : 'Unexpected conversations route failure.';
  logger.error(message, {
    module: 'conversations.routes',
  });
  res.status(500).json(createApiErrorResponse('INTERNAL_ERROR', message));
});
export default router;

View File

@@ -1,6 +1,6 @@
import express from 'express';
import sessionManager from '../../../sessionManager.js';
import { deleteSession as deleteSessionFromProviders } from '@/modules/sessions/sessions.service.js';
import { llmSessionsService } from '@/modules/llm/sessions.service.js';
const router = express.Router();
@@ -13,7 +13,7 @@ router.delete('/sessions/:sessionId', async (req, res) => {
}
await sessionManager.deleteSession(sessionId);
await deleteSessionFromProviders(sessionId);
await llmSessionsService.deleteSessionArtifacts(sessionId);
res.json({ success: true });
} catch (error) {
console.error(`Error deleting Gemini session ${req.params.sessionId}:`, error);

View File

@@ -0,0 +1,338 @@
import assert from 'node:assert/strict';
import fs from 'node:fs/promises';
import os from 'node:os';
import path from 'node:path';
import test from 'node:test';
import { AppError } from '../../shared/utils/app-error.js';
import { llmService } from './llm.service.js';
import { CursorProvider } from './providers/cursor.provider.js';
import { GeminiProvider } from './providers/gemini.provider.js';
import { CodexProvider } from './providers/codex.provider.js';
import { ClaudeProvider } from './providers/claude.provider.js';
/**
 * Wraps a plain array in an async iterable, mimicking SDK event streams.
 */
const asyncEvents = async function* (events: unknown[]) {
  // Delegate directly: each array element becomes one async-iterated event.
  yield* events;
};
// This test covers Cursor start/resume command construction, including yolo/model/resume flags.
test('cursor provider builds start/resume CLI invocations correctly', () => {
  const provider = new CursorProvider() as any; // cast to reach the private invocation builder
  const start = provider.createCliInvocation({
    prompt: 'build feature',
    sessionId: 'cursor-session-1',
    isResume: false,
    model: 'composer-2',
    allowYolo: true,
    workspacePath: '/tmp/workspace',
  });
  assert.equal(start.command, 'cursor-agent');
  // Flag order is part of the contract: base flags, yolo/model extras, prompt last.
  assert.deepEqual(start.args, [
    '--print',
    '--trust',
    '--output-format',
    'stream-json',
    '--yolo',
    '--model',
    'composer-2',
    'build feature',
  ]);
  const resume = provider.createCliInvocation({
    prompt: 'continue',
    sessionId: 'cursor-session-1',
    isResume: true,
    workspacePath: '/tmp/workspace',
  });
  assert.equal(resume.command, 'cursor-agent');
  assert.deepEqual(resume.args, [
    '--print',
    '--trust',
    '--output-format',
    'stream-json',
    '--resume',
    'cursor-session-1',
    'continue',
  ]);
});
// This test covers Cursor model-list parsing, including ANSI stripping and current/default flags.
test('cursor provider parses model list output into normalized models', async () => {
  const provider = new CursorProvider() as any;
  // Stub the CLI call: the header line carries ANSI color codes and the last line is a hint.
  provider.runCommandForOutput = async () => [
    '\u001b[32mAvailable models\u001b[0m',
    'auto - Auto (current)',
    'composer-2-fast - Composer 2 Fast (default)',
    'Tip: use --model',
  ].join('\n');
  const models = await provider.listModels();
  assert.equal(models.length, 2); // header and tip lines must be filtered out
  assert.deepEqual(models[0], {
    value: 'auto',
    displayName: 'auto',
    description: 'Auto',
    current: true,
    default: false,
    supportsThinkingModes: false,
    supportedThinkingModes: [],
  });
  assert.equal(models[1].value, 'composer-2-fast');
  assert.equal(models[1].default, true);
});
// This test covers Gemini start/resume CLI construction and curated model list contract.
test('gemini provider builds start/resume CLI invocations and exposes curated models', async () => {
  const provider = new GeminiProvider() as any;
  const start = provider.createCliInvocation({
    prompt: 'explain architecture',
    sessionId: 'gemini-session-1',
    isResume: false,
    model: 'gemini-2.5-pro',
    workspacePath: '/tmp/workspace',
  });
  assert.equal(start.command, 'gemini');
  assert.deepEqual(start.args, [
    '--prompt',
    'explain architecture',
    '--output-format',
    'stream-json',
    '--model',
    'gemini-2.5-pro',
  ]);
  const resume = provider.createCliInvocation({
    prompt: 'continue',
    sessionId: 'gemini-session-1',
    isResume: true,
    workspacePath: '/tmp/workspace',
  });
  // Resume swaps the model flag for --resume <sessionId>.
  assert.deepEqual(resume.args, [
    '--prompt',
    'continue',
    '--output-format',
    'stream-json',
    '--resume',
    'gemini-session-1',
  ]);
  const models = await provider.listModels();
  // Gemini models come from a curated static list rather than CLI output.
  assert.ok(models.some((model: { value?: string }) => model.value === 'gemini-2.5-pro'));
});
// This test covers Codex start/resume behavior and abort-controller based stop behavior.
test('codex provider start/resume use correct SDK thread methods and stop aborts signal', async () => {
  const provider = new CodexProvider() as any;
  const calls: Array<{ fn: 'start' | 'resume'; sessionId?: string; options: Record<string, unknown> }> = [];
  let capturedSignal: AbortSignal | undefined;
  // Fake thread records the abort signal handed to runStreamed.
  const fakeThread = {
    async runStreamed(_prompt: string, options?: { signal?: AbortSignal }) {
      capturedSignal = options?.signal;
      return { events: asyncEvents([{ type: 'chunk' }]) };
    },
  };
  // Replace the lazy SDK loader so no real Codex SDK install is needed.
  provider.loadCodexSdkModule = async () => ({
    Codex: class {
      startThread(options?: Record<string, unknown>) {
        calls.push({ fn: 'start', options: options ?? {} });
        return fakeThread;
      }
      resumeThread(sessionId: string, options?: Record<string, unknown>) {
        calls.push({ fn: 'resume', sessionId, options: options ?? {} });
        return fakeThread;
      }
    },
  });
  const startExec = await provider.createSdkExecution({
    prompt: 'start codex',
    sessionId: 'codex-session-1',
    isResume: false,
    model: 'gpt-5.4',
    thinkingMode: 'high',
    workspacePath: '/tmp/workspace',
  });
  assert.equal(calls[0]?.fn, 'start');
  assert.equal(calls[0]?.options.model, 'gpt-5.4');
  assert.equal(calls[0]?.options.modelReasoningEffort, 'high');
  assert.equal(calls[0]?.options.workingDirectory, '/tmp/workspace');
  // stop() must abort the signal given to the in-flight runStreamed call.
  assert.equal(await startExec.stop(), true);
  assert.equal(capturedSignal?.aborted, true);
  await provider.createSdkExecution({
    prompt: 'resume codex',
    sessionId: 'codex-session-1',
    isResume: true,
    workspacePath: '/tmp/workspace',
  });
  assert.equal(calls[1]?.fn, 'resume');
  assert.equal(calls[1]?.sessionId, 'codex-session-1');
});
// This test covers Codex model-list loading from ~/.codex/models_cache.json and model-shape mapping.
test('codex provider reads models_cache.json and maps model metadata', async () => {
  const provider = new CodexProvider();
  const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), 'codex-models-'));
  const codexDir = path.join(tempRoot, '.codex');
  await fs.mkdir(codexDir, { recursive: true });
  await fs.writeFile(
    path.join(codexDir, 'models_cache.json'),
    JSON.stringify({
      models: [
        {
          slug: 'gpt-5.4',
          display_name: 'GPT-5.4',
          description: 'Latest frontier agentic coding model.',
          priority: 1,
          supported_reasoning_levels: [
            { effort: 'low' },
            { effort: 'medium' },
            { effort: 'high' },
          ],
        },
      ],
    }),
    'utf8',
  );
  // Point os.homedir at the temp root so the provider reads the fixture file.
  const originalHomeDir = os.homedir;
  (os as any).homedir = () => tempRoot;
  try {
    const models = await provider.listModels();
    assert.equal(models.length, 1);
    assert.equal(models[0]?.value, 'gpt-5.4');
    assert.equal(models[0]?.default, true);
    assert.deepEqual(models[0]?.supportedThinkingModes, ['low', 'medium', 'high']);
  } finally {
    (os as any).homedir = originalHomeDir; // always restore the patched global
    await fs.rm(tempRoot, { recursive: true, force: true });
  }
});
// This test covers persisted session-level model/thinking preferences flowing into Codex thread options.
test('codex provider applies saved model/thinking preferences on subsequent launch', async () => {
  const provider = new CodexProvider() as any;
  let threadOptions: Record<string, unknown> | null = null;
  provider.loadCodexSdkModule = async () => ({
    Codex: class {
      startThread(options?: Record<string, unknown>) {
        threadOptions = options ?? null; // capture what the provider hands to the SDK
        return {
          async runStreamed() {
            return { events: asyncEvents([]) };
          },
        };
      }
      resumeThread() {
        return {
          async runStreamed() {
            return { events: asyncEvents([]) };
          },
        };
      }
    },
  });
  // Preferences stored before launch must be applied without being passed again.
  await provider.setSessionModel('codex-pref-1', 'gpt-5.4');
  await provider.setSessionThinkingMode('codex-pref-1', 'xhigh');
  await provider.launchSession({
    prompt: 'use stored preferences',
    sessionId: 'codex-pref-1',
  });
  assert.ok(threadOptions);
  assert.equal((threadOptions as { model?: string }).model, 'gpt-5.4');
  assert.equal((threadOptions as { modelReasoningEffort?: string }).modelReasoningEffort, 'xhigh');
});
// This test covers Claude thinking-level mapping, runtime permission handlers, and model/event normalization.
test('claude provider helper mappings match unifier contract', async () => {
  const provider = new ClaudeProvider() as any;
  // Unknown or missing effort levels fall back to 'high'.
  assert.equal(provider.resolveClaudeEffort(undefined), 'high');
  assert.equal(provider.resolveClaudeEffort('low'), 'low');
  assert.equal(provider.resolveClaudeEffort('not-real'), 'high');
  const allowHandler = provider.resolvePermissionHandler('allow');
  const denyHandler = provider.resolvePermissionHandler('deny');
  const askHandler = provider.resolvePermissionHandler('ask');
  assert.equal(typeof allowHandler, 'function');
  assert.equal(typeof denyHandler, 'function');
  assert.equal(askHandler, undefined); // 'ask' means no automatic handler
  const allowResult = await allowHandler?.();
  const denyResult = await denyHandler?.();
  assert.deepEqual(allowResult, { behavior: 'allow' });
  assert.equal(denyResult?.behavior, 'deny');
  const mappedModel = provider.mapModelInfo({
    value: 'default',
    displayName: 'Default',
    description: 'Default Claude model',
    supportsEffort: true,
    supportedEffortLevels: ['low', 'medium', 'high', 'max'],
  });
  // Claude's effort concept is normalized into the unifier's thinking-mode fields.
  assert.equal(mappedModel.value, 'default');
  assert.equal(mappedModel.supportsThinkingModes, true);
  assert.deepEqual(mappedModel.supportedThinkingModes, ['low', 'medium', 'high', 'max']);
  const mappedEvent = provider.mapSdkEvent({ type: 'message', subtype: 'delta' });
  assert.equal(mappedEvent?.message, 'message:delta');
});
// This test covers service-level capability validation for runtime permissions and thinking mode support.
test('llmService rejects unsupported runtime permission and thinking mode combinations', async () => {
  // Cursor supports neither capability; both requests must fail with typed 400s.
  await assert.rejects(
    llmService.startSession('cursor', {
      prompt: 'hello',
      runtimePermissionMode: 'allow',
    }),
    (error: unknown) =>
      error instanceof AppError &&
      error.code === 'RUNTIME_PERMISSION_NOT_SUPPORTED' &&
      error.statusCode === 400,
  );
  await assert.rejects(
    llmService.startSession('cursor', {
      prompt: 'hello',
      thinkingMode: 'high',
    }),
    (error: unknown) =>
      error instanceof AppError &&
      error.code === 'THINKING_MODE_NOT_SUPPORTED' &&
      error.statusCode === 400,
  );
});
// This test covers model/thinking capability gates on providers before any external process/SDK usage.
test('providers enforce capability gates for model/thinking updates', async () => {
  const claudeProvider = new ClaudeProvider();
  const cursorProvider = new CursorProvider();
  // Cursor has no thinking-mode support, so the update is rejected up front.
  await assert.rejects(
    cursorProvider.setSessionThinkingMode('cursor-session', 'high'),
    (error: unknown) =>
      error instanceof AppError &&
      error.code === 'THINKING_MODE_NOT_SUPPORTED' &&
      error.statusCode === 400,
  );
  // Claude supports model selection; the preference is persisted in memory.
  await claudeProvider.setSessionModel('claude-session', 'sonnet');
  const preference = (claudeProvider as any).getSessionPreference('claude-session');
  assert.equal(preference.model, 'sonnet');
});

View File

@@ -0,0 +1,327 @@
import assert from 'node:assert/strict';
import fs from 'node:fs/promises';
import os from 'node:os';
import path from 'node:path';
import test from 'node:test';
import { AppError } from '../../shared/utils/app-error.js';
import { scanStateDb } from '../../shared/database/repositories/scan-state.db.js';
import { sessionsDb } from '../../shared/database/repositories/sessions.db.js';
import { llmSessionsService } from './sessions.service.js';
import { sessionIndexers } from './session-indexers/index.js';
import { conversationSearchService } from '../conversations/conversation-search.service.js';
import type { ISessionIndexer } from './session-indexers/session-indexer.interface.js';
/**
 * Temporarily replaces a method/property on `target`, returning a callback
 * that restores the saved original.
 */
const patchMethod = <T extends object, K extends keyof T>(target: T, key: K, replacement: T[K]) => {
  const mutable = target as Record<K, T[K]>;
  const saved = mutable[key];
  mutable[key] = replacement;
  return (): void => {
    mutable[key] = saved;
  };
};
/**
 * Swaps the module-level indexer list in place (it is consumed by reference,
 * so reassignment would not work), returning a callback that restores the
 * previous contents.
 */
const patchIndexers = (nextIndexers: ISessionIndexer[]) => {
  const snapshot = sessionIndexers.slice();
  const replaceAll = (entries: ISessionIndexer[]): void => {
    sessionIndexers.length = 0;
    sessionIndexers.push(...entries);
  };
  replaceAll(nextIndexers);
  return () => {
    replaceAll(snapshot);
  };
};
// This test covers multi-provider synchronization orchestration and failure aggregation.
test('llmSessionsService.synchronizeSessions aggregates processed counts and failures', { concurrency: false }, async () => {
  let updateLastScannedAtCalls = 0;
  const restoreScanDate = patchMethod(scanStateDb, 'getLastScannedAt', () => new Date('2026-04-01T00:00:00.000Z'));
  const restoreUpdateScanDate = patchMethod(scanStateDb, 'updateLastScannedAt', () => {
    updateLastScannedAtCalls += 1;
  });
  // One indexer succeeds, one throws: both counts and failures must be reported.
  const restoreIndexers = patchIndexers([
    {
      provider: 'claude',
      async synchronize() {
        return 3;
      },
    },
    {
      provider: 'codex',
      async synchronize() {
        throw new Error('codex index failed');
      },
    },
  ]);
  try {
    const result = await llmSessionsService.synchronizeSessions();
    assert.equal(result.processedByProvider.claude, 3);
    assert.equal(result.processedByProvider.codex, 0);
    assert.equal(result.processedByProvider.cursor, 0);
    assert.equal(result.processedByProvider.gemini, 0);
    assert.equal(result.failures.length, 1);
    assert.equal(result.failures[0], 'codex index failed');
    // The scan watermark still advances once per run, even with partial failures.
    assert.equal(updateLastScannedAtCalls, 1);
  } finally {
    // Restore in reverse patch order.
    restoreIndexers();
    restoreUpdateScanDate();
    restoreScanDate();
  }
});
// This test covers provider-specific sync behavior for both incremental and full-rescan modes.
test('llmSessionsService.synchronizeProvider honors fullRescan option', { concurrency: false }, async () => {
  const observedScanDates: Array<Date | null> = [];
  const restoreScanDate = patchMethod(scanStateDb, 'getLastScannedAt', () => new Date('2026-04-02T00:00:00.000Z'));
  const restoreUpdateScanDate = patchMethod(scanStateDb, 'updateLastScannedAt', () => {});
  const restoreIndexers = patchIndexers([
    {
      provider: 'cursor',
      async synchronize(lastScanAt) {
        observedScanDates.push(lastScanAt); // record the watermark the service passes down
        return 7;
      },
    },
  ]);
  try {
    const incremental = await llmSessionsService.synchronizeProvider('cursor');
    const fullRescan = await llmSessionsService.synchronizeProvider('cursor', { fullRescan: true });
    assert.equal(incremental.provider, 'cursor');
    assert.equal(incremental.processed, 7);
    assert.equal(fullRescan.provider, 'cursor');
    assert.equal(fullRescan.processed, 7);
    assert.equal(observedScanDates.length, 2);
    assert.ok(observedScanDates[0] instanceof Date); // incremental passes the stored date
    assert.equal(observedScanDates[1], null); // fullRescan clears the watermark
  } finally {
    restoreIndexers();
    restoreUpdateScanDate();
    restoreScanDate();
  }
});
// This test covers session rename persistence and not-found guardrails.
test('llmSessionsService.updateSessionCustomName validates existence before updating', { concurrency: false }, () => {
  let updated: { sessionId: string; customName: string } | null = null;
  // Only 'known-session' exists in the stubbed DB.
  const restoreGetById = patchMethod(sessionsDb, 'getSessionById', (sessionId: string) => (
    sessionId === 'known-session'
      ? {
          session_id: 'known-session',
          provider: 'claude',
          workspace_path: '/tmp/workspace',
          jsonl_path: null,
          created_at: '2026-04-01T00:00:00.000Z',
          updated_at: '2026-04-01T00:00:00.000Z',
        }
      : null
  ));
  const restoreUpdateName = patchMethod(sessionsDb, 'updateSessionCustomName', (sessionId: string, customName: string) => {
    updated = { sessionId, customName };
  });
  try {
    llmSessionsService.updateSessionCustomName('known-session', 'New Session Name');
    assert.deepEqual(updated, {
      sessionId: 'known-session',
      customName: 'New Session Name',
    });
    // Renaming a missing session must fail before touching the DB writer.
    assert.throws(
      () => llmSessionsService.updateSessionCustomName('missing-session', 'Nope'),
      (error: unknown) =>
        error instanceof AppError &&
        error.code === 'SESSION_NOT_FOUND' &&
        error.statusCode === 404,
    );
  } finally {
    restoreUpdateName();
    restoreGetById();
  }
});
// This test covers delete behavior using only DB jsonl_path, including invalid id validation.
test('llmSessionsService.deleteSessionArtifacts validates ids and deletes disk/db artifacts', { concurrency: false }, async () => {
  const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), 'llm-delete-session-'));
  const transcriptPath = path.join(tempRoot, 'session.jsonl');
  await fs.writeFile(transcriptPath, '{"ok":true}\n', 'utf8');
  let deletedSessionId: string | null = null;
  const restoreGetById = patchMethod(sessionsDb, 'getSessionById', (sessionId: string) => (
    sessionId === 'session-123'
      ? {
          session_id: 'session-123',
          provider: 'cursor',
          workspace_path: '/tmp/workspace',
          jsonl_path: transcriptPath,
          created_at: '2026-04-01T00:00:00.000Z',
          updated_at: '2026-04-01T00:00:00.000Z',
        }
      : null
  ));
  const restoreDelete = patchMethod(sessionsDb, 'deleteSession', (sessionId: string) => {
    deletedSessionId = sessionId;
  });
  try {
    // Path-traversal style ids are rejected before any lookup or disk access.
    await assert.rejects(
      llmSessionsService.deleteSessionArtifacts('../invalid'),
      (error: unknown) =>
        error instanceof AppError &&
        error.code === 'INVALID_SESSION_ID' &&
        error.statusCode === 400,
    );
    const deleted = await llmSessionsService.deleteSessionArtifacts('session-123');
    assert.equal(deleted.sessionId, 'session-123');
    assert.equal(deleted.deletedFromDatabase, true);
    assert.equal(deleted.deletedFromDisk, true);
    assert.equal(deletedSessionId, 'session-123');
    assert.rejects === assert.rejects; // (no-op guard removed)
    await assert.rejects(fs.access(transcriptPath)); // the transcript is gone from disk
    // Unknown ids are a no-op delete, not an error.
    const missing = await llmSessionsService.deleteSessionArtifacts('session-404');
    assert.equal(missing.deletedFromDatabase, false);
    assert.equal(missing.deletedFromDisk, false);
  } finally {
    restoreDelete();
    restoreGetById();
    await fs.rm(tempRoot, { recursive: true, force: true });
  }
});
// This test covers session-history parsing for JSONL (including malformed lines) and Gemini JSON files.
test('llmSessionsService.getSessionHistory parses JSONL and Gemini JSON correctly', { concurrency: false }, async () => {
  const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), 'llm-history-'));
  const jsonlPath = path.join(tempRoot, 'session.jsonl');
  const jsonPath = path.join(tempRoot, 'gemini.json');
  // The second JSONL line is intentionally malformed to exercise the parse-error path.
  await fs.writeFile(jsonlPath, '{"message":"hello"}\nnot-json\n', 'utf8');
  await fs.writeFile(jsonPath, '{"messages":[{"text":"hi"}]}', 'utf8');
  const restoreGetById = patchMethod(sessionsDb, 'getSessionById', (sessionId: string) => {
    if (sessionId === 'jsonl-session') {
      return {
        session_id: 'jsonl-session',
        provider: 'cursor',
        workspace_path: '/tmp/workspace',
        jsonl_path: jsonlPath,
        created_at: '2026-04-01T00:00:00.000Z',
        updated_at: '2026-04-01T00:00:00.000Z',
      };
    }
    if (sessionId === 'json-session') {
      return {
        session_id: 'json-session',
        provider: 'gemini',
        workspace_path: '/tmp/workspace',
        jsonl_path: jsonPath,
        created_at: '2026-04-01T00:00:00.000Z',
        updated_at: '2026-04-01T00:00:00.000Z',
      };
    }
    if (sessionId === 'missing-history-path') {
      return {
        session_id: 'missing-history-path',
        provider: 'claude',
        workspace_path: '/tmp/workspace',
        jsonl_path: null,
        created_at: '2026-04-01T00:00:00.000Z',
        updated_at: '2026-04-01T00:00:00.000Z',
      };
    }
    return null;
  });
  try {
    const jsonlHistory = await llmSessionsService.getSessionHistory('jsonl-session');
    assert.equal(jsonlHistory.fileType, 'jsonl');
    assert.equal(Array.isArray(jsonlHistory.entries), true);
    assert.equal(jsonlHistory.entries.length, 2);
    assert.deepEqual(jsonlHistory.entries[0], { message: 'hello' });
    // Malformed lines are preserved with a parseError marker instead of dropped.
    assert.deepEqual(jsonlHistory.entries[1], { raw: 'not-json', parseError: true });
    const geminiHistory = await llmSessionsService.getSessionHistory('json-session');
    assert.equal(geminiHistory.fileType, 'json');
    assert.equal(geminiHistory.entries.length, 1);
    assert.deepEqual(geminiHistory.entries[0], { messages: [{ text: 'hi' }] });
    await assert.rejects(
      llmSessionsService.getSessionHistory('unknown-session'),
      (error: unknown) =>
        error instanceof AppError &&
        error.code === 'SESSION_NOT_FOUND' &&
        error.statusCode === 404,
    );
    await assert.rejects(
      llmSessionsService.getSessionHistory('missing-history-path'),
      (error: unknown) =>
        error instanceof AppError &&
        error.code === 'SESSION_HISTORY_NOT_AVAILABLE' &&
        error.statusCode === 404,
    );
  } finally {
    restoreGetById();
    await fs.rm(tempRoot, { recursive: true, force: true });
  }
});
// This test covers conversation search over indexed transcript files with provider/case filters.
test('conversationSearchService searches indexed transcripts with provider and case filters', { concurrency: false }, async () => {
  const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), 'llm-search-'));
  const cursorPath = path.join(tempRoot, 'cursor.jsonl');
  const codexPath = path.join(tempRoot, 'codex.jsonl');
  // Same needle in different cases so case-sensitivity is observable.
  await fs.writeFile(cursorPath, 'hello world\nNeedle lower\n', 'utf8');
  await fs.writeFile(codexPath, 'HELLO WORLD\nNEEDLE UPPER\n', 'utf8');
  const restoreGetAll = patchMethod(sessionsDb, 'getAllSessions', () => ([
    {
      session_id: 'cursor-s',
      provider: 'cursor',
      workspace_path: '/tmp/workspace-cursor',
      jsonl_path: cursorPath,
      custom_name: null,
      created_at: '2026-04-01T00:00:00.000Z',
      updated_at: '2026-04-01T00:00:00.000Z',
    },
    {
      session_id: 'codex-s',
      provider: 'codex',
      workspace_path: '/tmp/workspace-codex',
      jsonl_path: codexPath,
      custom_name: null,
      created_at: '2026-04-01T00:00:00.000Z',
      updated_at: '2026-04-01T00:00:00.000Z',
    },
  ]));
  try {
    // Whitespace-only queries are rejected before any file access.
    await assert.rejects(
      conversationSearchService.search({ query: ' ' }),
      (error: unknown) =>
        error instanceof AppError &&
        error.code === 'SEARCH_QUERY_REQUIRED' &&
        error.statusCode === 400,
    );
    const anyProviderResults = await conversationSearchService.search({
      query: 'needle',
      caseSensitive: false,
      limit: 20,
    });
    assert.ok(anyProviderResults.some((entry) => entry.sessionId === 'cursor-s'));
    assert.ok(anyProviderResults.some((entry) => entry.sessionId === 'codex-s'));
    const codexOnlyResults = await conversationSearchService.search({
      query: 'NEEDLE',
      caseSensitive: true,
      provider: 'codex',
      limit: 20,
    });
    assert.ok(codexOnlyResults.length >= 1);
    assert.ok(codexOnlyResults.every((entry) => entry.provider === 'codex'));
  } finally {
    restoreGetAll();
    await fs.rm(tempRoot, { recursive: true, force: true });
  }
});

View File

@@ -0,0 +1,42 @@
import type { IProvider } from '@/modules/llm/providers/provider.interface.js';
import { ClaudeProvider } from '@/modules/llm/providers/claude.provider.js';
import { CodexProvider } from '@/modules/llm/providers/codex.provider.js';
import { CursorProvider } from '@/modules/llm/providers/cursor.provider.js';
import { GeminiProvider } from '@/modules/llm/providers/gemini.provider.js';
import type { LLMProvider } from '@/shared/types/app.js';
import { AppError } from '@/shared/utils/app-error.js';
// One long-lived instance per supported backend; the registry below hands
// these out so all callers share the same provider objects (and therefore
// any in-memory session preferences they hold).
const providers: Record<LLMProvider, IProvider> = {
  claude: new ClaudeProvider(),
  codex: new CodexProvider(),
  cursor: new CursorProvider(),
  gemini: new GeminiProvider(),
};
/**
 * Central registry for resolving provider implementations by id.
 */
export const llmProviderRegistry = {
  /**
   * Returns all registered providers.
   */
  listProviders(): IProvider[] {
    return Object.values(providers);
  },
  /**
   * Resolves one provider or throws a typed 400 error.
   */
  resolveProvider(provider: string): IProvider {
    const match = providers[provider as LLMProvider];
    if (match) {
      return match;
    }
    throw new AppError(`Unsupported provider "${provider}".`, {
      code: 'UNSUPPORTED_PROVIDER',
      statusCode: 400,
    });
  },
};

View File

@@ -0,0 +1,295 @@
import express, { type NextFunction, type Request, type Response } from 'express';
import { asyncHandler } from '@/shared/http/async-handler.js';
import { AppError } from '@/shared/utils/app-error.js';
import { createApiErrorResponse, createApiSuccessResponse } from '@/shared/http/api-response.js';
import { llmService } from '@/modules/llm/llm.service.js';
import { llmSessionsService } from '@/modules/llm/sessions.service.js';
import { logger } from '@/shared/utils/logger.js';
// Express router backing every LLM endpoint registered in this module.
const router = express.Router();
/**
 * Safely reads an Express path parameter that may arrive as string or string[].
 * Returns the string (or first string array entry); otherwise throws a typed
 * 400 error naming the offending parameter.
 */
const readPathParam = (value: unknown, name: string): string => {
  if (typeof value === 'string') {
    return value;
  }
  const firstEntry = Array.isArray(value) ? value[0] : undefined;
  if (typeof firstEntry === 'string') {
    return firstEntry;
  }
  throw new AppError(`${name} path parameter is invalid.`, {
    code: 'INVALID_PATH_PARAMETER',
    statusCode: 400,
  });
};
/**
 * Reads the :provider path parameter and normalizes it to a trimmed,
 * lowercase id for registry lookup.
 */
const normalizeProviderParam = (value: unknown): string => {
  const rawProvider = readPathParam(value, 'provider');
  return rawProvider.trim().toLowerCase();
};
/**
 * Allows callers to block until a launched/resumed session reaches a final state.
 * Only the strict boolean `true` enables waiting; any other value is ignored.
 */
const parseWaitForCompletion = (req: Request): boolean => {
  const body = req.body as Record<string, unknown> | undefined;
  return body?.waitForCompletion === true;
};
/**
 * Validates and normalizes the rename payload.
 * The summary is trimmed, must be non-empty, and may not exceed 500 characters.
 */
const parseRenamePayload = (payload: unknown): { summary: string } => {
  if (payload === null || typeof payload !== 'object') {
    throw new AppError('Request body must be an object.', {
      code: 'INVALID_REQUEST_BODY',
      statusCode: 400,
    });
  }
  const { summary: rawSummary } = payload as Record<string, unknown>;
  const summary = typeof rawSummary === 'string' ? rawSummary.trim() : '';
  if (summary.length === 0) {
    throw new AppError('summary is required.', {
      code: 'SUMMARY_REQUIRED',
      statusCode: 400,
    });
  }
  if (summary.length > 500) {
    throw new AppError('summary must not exceed 500 characters.', {
      code: 'SUMMARY_TOO_LONG',
      statusCode: 400,
    });
  }
  return { summary };
};
/**
 * GET /providers — lists every registered provider with its execution family
 * and capability flags.
 */
router.get(
  '/providers',
  asyncHandler(async (_req: Request, res: Response) => {
    res.json(createApiSuccessResponse({ providers: llmService.listProviders() }));
  }),
);
/**
 * GET /providers/:provider/models — lists models for one provider.
 * Unknown provider ids surface as a 400 AppError from the registry lookup.
 */
router.get(
  '/providers/:provider/models',
  asyncHandler(async (req: Request, res: Response) => {
    const provider = normalizeProviderParam(req.params.provider);
    const models = await llmService.listModels(provider);
    res.json(createApiSuccessResponse({ provider, models }));
  }),
);
/**
 * GET /providers/:provider/sessions — lists the provider's in-memory session
 * snapshots.
 */
router.get(
  '/providers/:provider/sessions',
  asyncHandler(async (req: Request, res: Response) => {
    const provider = normalizeProviderParam(req.params.provider);
    const sessions = llmService.listSessions(provider);
    res.json(createApiSuccessResponse({ provider, sessions }));
  }),
);
/**
 * GET /providers/:provider/sessions/:sessionId — fetches one session snapshot,
 * responding 404 when the provider has no record of the id.
 */
router.get(
  '/providers/:provider/sessions/:sessionId',
  asyncHandler(async (req: Request, res: Response) => {
    const provider = normalizeProviderParam(req.params.provider);
    const sessionId = readPathParam(req.params.sessionId, 'sessionId');
    const session = llmService.getSession(provider, sessionId);
    if (!session) {
      throw new AppError(`Session "${sessionId}" not found for provider "${provider}".`, {
        code: 'SESSION_NOT_FOUND',
        statusCode: 404,
      });
    }
    res.json(createApiSuccessResponse({ provider, session }));
  }),
);
/**
 * POST /providers/:provider/sessions/start — launches a new session.
 * Responds 202 with the initial snapshot immediately, unless the body sets
 * waitForCompletion: true, in which case it blocks for the final snapshot.
 */
router.post(
  '/providers/:provider/sessions/start',
  asyncHandler(async (req: Request, res: Response) => {
    const provider = normalizeProviderParam(req.params.provider);
    const snapshot = await llmService.startSession(provider, req.body);
    const waitForCompletion = parseWaitForCompletion(req);
    if (!waitForCompletion) {
      res.status(202).json(
        createApiSuccessResponse({
          provider,
          session: snapshot,
        }),
      );
      return;
    }
    // waitForSession may return null (session record gone); fall back to the
    // launch snapshot so the response always carries a session payload.
    const completedSnapshot = await llmService.waitForSession(provider, snapshot.sessionId);
    res.json(createApiSuccessResponse({ provider, session: completedSnapshot ?? snapshot }));
  }),
);
/**
 * POST /providers/:provider/sessions/:sessionId/resume — resumes an existing
 * session; same 202/waitForCompletion semantics as the start route.
 */
router.post(
  '/providers/:provider/sessions/:sessionId/resume',
  asyncHandler(async (req: Request, res: Response) => {
    const provider = normalizeProviderParam(req.params.provider);
    const sessionId = readPathParam(req.params.sessionId, 'sessionId');
    const snapshot = await llmService.resumeSession(provider, sessionId, req.body);
    const waitForCompletion = parseWaitForCompletion(req);
    if (!waitForCompletion) {
      res.status(202).json(createApiSuccessResponse({ provider, session: snapshot }));
      return;
    }
    const completedSnapshot = await llmService.waitForSession(provider, sessionId);
    res.json(createApiSuccessResponse({ provider, session: completedSnapshot ?? snapshot }));
  }),
);
/**
 * POST /providers/:provider/sessions/:sessionId/stop — requests a graceful
 * stop; `stopped` in the response reports whether the provider accepted it.
 */
router.post(
  '/providers/:provider/sessions/:sessionId/stop',
  asyncHandler(async (req: Request, res: Response) => {
    const provider = normalizeProviderParam(req.params.provider);
    const sessionId = readPathParam(req.params.sessionId, 'sessionId');
    const stopped = await llmService.stopSession(provider, sessionId);
    res.json(createApiSuccessResponse({ provider, sessionId, stopped }));
  }),
);
/**
 * PATCH /providers/:provider/sessions/:sessionId/model — switches the
 * session's model; empty/missing values are rejected before reaching the
 * provider.
 */
router.patch(
  '/providers/:provider/sessions/:sessionId/model',
  asyncHandler(async (req: Request, res: Response) => {
    const provider = normalizeProviderParam(req.params.provider);
    const sessionId = readPathParam(req.params.sessionId, 'sessionId');
    const model = typeof req.body?.model === 'string' ? req.body.model.trim() : '';
    if (!model) {
      throw new AppError('model is required.', {
        code: 'MODEL_REQUIRED',
        statusCode: 400,
      });
    }
    await llmService.setSessionModel(provider, sessionId, model);
    res.json(
      createApiSuccessResponse({
        provider,
        sessionId,
        model,
      }),
    );
  }),
);
/**
 * PATCH /providers/:provider/sessions/:sessionId/thinking — updates the
 * session's thinking mode; empty/missing values are rejected.
 */
router.patch(
  '/providers/:provider/sessions/:sessionId/thinking',
  asyncHandler(async (req: Request, res: Response) => {
    const provider = normalizeProviderParam(req.params.provider);
    const sessionId = readPathParam(req.params.sessionId, 'sessionId');
    const thinkingMode =
      typeof req.body?.thinkingMode === 'string' ? req.body.thinkingMode.trim() : '';
    if (!thinkingMode) {
      throw new AppError('thinkingMode is required.', {
        code: 'THINKING_MODE_REQUIRED',
        statusCode: 400,
      });
    }
    await llmService.setSessionThinkingMode(provider, sessionId, thinkingMode);
    res.json(
      createApiSuccessResponse({
        provider,
        sessionId,
        thinkingMode,
      }),
    );
  }),
);
/**
 * GET /sessions/:sessionId/history — returns the session history resolved by
 * llmSessionsService (provider-independent sessionId lookup).
 */
router.get(
  '/sessions/:sessionId/history',
  asyncHandler(async (req: Request, res: Response) => {
    const sessionId = readPathParam(req.params.sessionId, 'sessionId');
    const history = await llmSessionsService.getSessionHistory(sessionId);
    res.json(createApiSuccessResponse(history));
  }),
);
/**
 * PUT /sessions/:sessionId/rename — renames one indexed session by writing the
 * validated custom summary into DB.
 */
router.put(
  '/sessions/:sessionId/rename',
  asyncHandler(async (req: Request, res: Response) => {
    const sessionId = readPathParam(req.params.sessionId, 'sessionId');
    const { summary } = parseRenamePayload(req.body);
    llmSessionsService.updateSessionCustomName(sessionId, summary);
    res.json(createApiSuccessResponse({ sessionId, summary }));
  }),
);
/**
 * GET /sessions/index — returns DB-indexed sessions discovered by the
 * session-processor scan, optionally filtered by ?provider=.
 */
router.get(
  '/sessions/index',
  asyncHandler(async (req: Request, res: Response) => {
    // Optional query filter, normalized the same way as path parameters.
    const provider =
      typeof req.query.provider === 'string' ? req.query.provider.trim().toLowerCase() : undefined;
    const sessions = llmSessionsService.listIndexedSessions(provider);
    res.json(createApiSuccessResponse({ provider: provider ?? null, sessions }));
  }),
);
/**
 * POST /sessions/sync — triggers provider disk scans and refreshes the shared
 * sessions table.
 */
router.post(
  '/sessions/sync',
  asyncHandler(async (_req: Request, res: Response) => {
    const syncResult = await llmSessionsService.synchronizeSessions();
    res.json(createApiSuccessResponse(syncResult));
  }),
);
/**
 * DELETE /sessions/:sessionId — deletes provider-specific session artifacts
 * and removes the DB row.
 */
router.delete(
  '/sessions/:sessionId',
  asyncHandler(async (req: Request, res: Response) => {
    const sessionId = readPathParam(req.params.sessionId, 'sessionId');
    const result = await llmSessionsService.deleteSessionArtifacts(sessionId);
    res.json(createApiSuccessResponse(result));
  }),
);
/**
 * Normalizes route-level failures to a consistent JSON API shape.
 * AppError instances keep their status/code/details; anything else is logged
 * and mapped to a generic 500 response.
 */
router.use((error: unknown, _req: Request, res: Response, _next: NextFunction) => {
  // A response may already be in flight; never attempt a second payload.
  if (res.headersSent) {
    return;
  }
  if (error instanceof AppError) {
    res
      .status(error.statusCode)
      .json(createApiErrorResponse(error.code, error.message, undefined, error.details));
    return;
  }
  const message = error instanceof Error ? error.message : 'Unexpected LLM route failure.';
  logger.error(message, {
    module: 'llm.routes',
  });
  res.status(500).json(createApiErrorResponse('INTERNAL_ERROR', message));
});
export default router;

View File

@@ -0,0 +1,180 @@
import type { LLMProvider } from '@/shared/types/app.js';
import { AppError } from '@/shared/utils/app-error.js';
import { llmProviderRegistry } from '@/modules/llm/llm.registry.js';
import type {
ProviderModel,
ProviderSessionSnapshot,
RuntimePermissionMode,
StartSessionInput,
} from '@/modules/llm/providers/provider.interface.js';
/**
 * Converts unknown request values into optional trimmed strings.
 * Non-strings and whitespace-only strings both collapse to undefined.
 */
const normalizeOptionalString = (value: unknown): string | undefined => {
  if (typeof value !== 'string') {
    return undefined;
  }
  const trimmed = value.trim();
  if (trimmed.length === 0) {
    return undefined;
  }
  return trimmed;
};
/**
 * Validates and normalizes the runtime permission mode.
 * Missing/blank input yields undefined; any value outside the known modes
 * raises a typed 400 error.
 */
const normalizePermissionMode = (value: unknown): RuntimePermissionMode | undefined => {
  const candidate = normalizeOptionalString(value);
  if (candidate === undefined) {
    return undefined;
  }
  switch (candidate) {
    case 'ask':
    case 'allow':
    case 'deny':
      return candidate;
    default:
      throw new AppError(`Unsupported runtimePermissionMode "${candidate}".`, {
        code: 'INVALID_RUNTIME_PERMISSION_MODE',
        statusCode: 400,
      });
  }
};
/**
 * Facade over provider implementations with payload validation and capability checks.
 * Routes call this instead of touching the registry or providers directly.
 */
export const llmService = {
  /**
   * Lists provider ids with their execution family and capability flags.
   */
  listProviders(): Array<{
    id: LLMProvider;
    family: 'sdk' | 'cli';
    capabilities: {
      supportsRuntimePermissionRequests: boolean;
      supportsThinkingModeControl: boolean;
      supportsModelSwitching: boolean;
      supportsSessionResume: boolean;
      supportsSessionStop: boolean;
    };
  }> {
    return llmProviderRegistry.listProviders().map((provider) => ({
      id: provider.id,
      family: provider.family,
      capabilities: provider.capabilities,
    }));
  },
  /**
   * Lists models for one provider; unknown ids raise a 400 from the registry.
   */
  async listModels(providerName: string): Promise<ProviderModel[]> {
    const provider = llmProviderRegistry.resolveProvider(providerName);
    return provider.listModels();
  },
  /**
   * Lists the provider's in-memory session snapshots.
   */
  listSessions(providerName: string): ProviderSessionSnapshot[] {
    const provider = llmProviderRegistry.resolveProvider(providerName);
    return provider.listSessions();
  },
  /**
   * Returns one session snapshot, or null when the provider has no record.
   */
  getSession(providerName: string, sessionId: string): ProviderSessionSnapshot | null {
    const provider = llmProviderRegistry.resolveProvider(providerName);
    return provider.getSession(sessionId);
  },
  /**
   * Validates payload and capability contracts, then launches a new session.
   */
  async startSession(providerName: string, payload: unknown): Promise<ProviderSessionSnapshot> {
    const provider = llmProviderRegistry.resolveProvider(providerName);
    const input = parseStartPayload(payload);
    validateCapabilityContracts(provider.capabilities, input);
    return provider.launchSession(input);
  },
  /**
   * Validates payload and capability contracts, then resumes a session.
   * The path-level sessionId takes precedence over any sessionId in the body.
   */
  async resumeSession(
    providerName: string,
    sessionId: string,
    payload: unknown,
  ): Promise<ProviderSessionSnapshot> {
    const provider = llmProviderRegistry.resolveProvider(providerName);
    const input = parseStartPayload(payload);
    validateCapabilityContracts(provider.capabilities, input);
    return provider.resumeSession({ ...input, sessionId });
  },
  /**
   * Blocks until the session finishes; null when the session is unknown.
   */
  async waitForSession(providerName: string, sessionId: string): Promise<ProviderSessionSnapshot | null> {
    const provider = llmProviderRegistry.resolveProvider(providerName);
    return provider.waitForSession(sessionId);
  },
  /**
   * Requests a graceful stop; returns whether the provider accepted it.
   */
  async stopSession(providerName: string, sessionId: string): Promise<boolean> {
    const provider = llmProviderRegistry.resolveProvider(providerName);
    return provider.stopSession(sessionId);
  },
  /**
   * Switches the session model (provider must support model switching).
   */
  async setSessionModel(providerName: string, sessionId: string, model: string): Promise<void> {
    const provider = llmProviderRegistry.resolveProvider(providerName);
    await provider.setSessionModel(sessionId, model);
  },
  /**
   * Updates the session thinking mode (provider must support it).
   */
  async setSessionThinkingMode(
    providerName: string,
    sessionId: string,
    thinkingMode: string,
  ): Promise<void> {
    const provider = llmProviderRegistry.resolveProvider(providerName);
    await provider.setSessionThinkingMode(sessionId, thinkingMode);
  },
};
/**
 * Parses and validates session start/resume request payloads.
 * Requires a non-empty prompt; all other fields are optional and normalized
 * to trimmed strings (or undefined) before reaching the provider.
 */
function parseStartPayload(payload: unknown): StartSessionInput {
  if (payload === null || typeof payload !== 'object') {
    throw new AppError('Request body must be an object.', {
      code: 'INVALID_REQUEST_BODY',
      statusCode: 400,
    });
  }
  const body = payload as Record<string, unknown>;
  const prompt = normalizeOptionalString(body.prompt);
  if (prompt === undefined) {
    throw new AppError('prompt is required.', {
      code: 'PROMPT_REQUIRED',
      statusCode: 400,
    });
  }
  const input: StartSessionInput = {
    prompt,
    workspacePath: normalizeOptionalString(body.workspacePath),
    sessionId: normalizeOptionalString(body.sessionId),
    model: normalizeOptionalString(body.model),
    thinkingMode: normalizeOptionalString(body.thinkingMode),
    runtimePermissionMode: normalizePermissionMode(body.runtimePermissionMode),
    allowYolo: body.allowYolo === true,
  };
  return input;
}
/**
 * Enforces capability contracts before provider invocation.
 * A non-'ask' permission mode requires runtime-permission support; any
 * thinking mode requires thinking-mode control support.
 */
function validateCapabilityContracts(
  capabilities: {
    supportsRuntimePermissionRequests: boolean;
    supportsThinkingModeControl: boolean;
  },
  input: StartSessionInput,
): void {
  const wantsRuntimePermissions = Boolean(
    input.runtimePermissionMode && input.runtimePermissionMode !== 'ask',
  );
  if (wantsRuntimePermissions && !capabilities.supportsRuntimePermissionRequests) {
    throw new AppError('Runtime permission requests are not supported by this provider.', {
      code: 'RUNTIME_PERMISSION_NOT_SUPPORTED',
      statusCode: 400,
    });
  }
  const wantsThinkingControl = Boolean(input.thinkingMode);
  if (wantsThinkingControl && !capabilities.supportsThinkingModeControl) {
    throw new AppError('Thinking mode is not supported by this provider.', {
      code: 'THINKING_MODE_NOT_SUPPORTED',
      statusCode: 400,
    });
  }
}

View File

@@ -0,0 +1,267 @@
import { AppError } from '@/shared/utils/app-error.js';
import type {
IProvider,
MutableProviderSession,
ProviderCapabilities,
ProviderExecutionFamily,
ProviderModel,
ProviderSessionEvent,
ProviderSessionSnapshot,
StartSessionInput,
} from '@/modules/llm/providers/provider.interface.js';
import type { LLMProvider } from '@/shared/types/app.js';
// Per-session model/thinking-mode preferences remembered for later resume calls.
// Held in memory only (Map below); nothing here is written to disk or DB.
type SessionPreference = {
  model?: string;
  thinkingMode?: string;
};
// Cap on the per-session event ring buffer; oldest events are dropped first.
const MAX_EVENT_BUFFER_SIZE = 2_000;
/**
 * Shared provider base for session lifecycle state and capability gating.
 *
 * Owns the in-memory session map, event buffering, status transitions, and the
 * validation around model/thinking-mode updates. Concrete providers supply
 * model listing and launch/resume behavior.
 */
export abstract class AbstractProvider implements IProvider {
  readonly id: LLMProvider;
  readonly family: ProviderExecutionFamily;
  readonly capabilities: ProviderCapabilities;
  // Live session records keyed by sessionId; populated via createSessionRecord.
  protected readonly sessions = new Map<string, MutableProviderSession>();
  // Remembered per-session preferences keyed by sessionId.
  protected readonly sessionPreferences = new Map<string, SessionPreference>();
  protected constructor(
    id: LLMProvider,
    family: ProviderExecutionFamily,
    capabilities: ProviderCapabilities,
  ) {
    this.id = id;
    this.family = family;
    this.capabilities = capabilities;
  }
  /** Lists models the provider can run. */
  abstract listModels(): Promise<ProviderModel[]>;
  /** Starts a brand-new session and returns its initial snapshot. */
  abstract launchSession(input: StartSessionInput): Promise<ProviderSessionSnapshot>;
  /** Resumes a previously started session and returns its snapshot. */
  abstract resumeSession(
    input: StartSessionInput & { sessionId: string },
  ): Promise<ProviderSessionSnapshot>;
  /**
   * Returns one in-memory session snapshot when present.
   */
  getSession(sessionId: string): ProviderSessionSnapshot | null {
    const session = this.sessions.get(sessionId);
    if (!session) {
      return null;
    }
    return this.toSnapshot(session);
  }
  /**
   * Returns snapshots of all in-memory sessions.
   */
  listSessions(): ProviderSessionSnapshot[] {
    return [...this.sessions.values()].map((session) => this.toSnapshot(session));
  }
  /**
   * Waits for a running session to complete and returns the final snapshot.
   * Returns null when the session id is unknown.
   */
  async waitForSession(sessionId: string): Promise<ProviderSessionSnapshot | null> {
    const session = this.sessions.get(sessionId);
    if (!session) {
      return null;
    }
    await session.completion;
    return this.toSnapshot(session);
  }
  /**
   * Requests a graceful session stop. Returns false when the session is
   * unknown or the underlying stop handler declined the request.
   */
  async stopSession(sessionId: string): Promise<boolean> {
    const session = this.sessions.get(sessionId);
    if (!session) {
      return false;
    }
    const stopped = await session.stop();
    // Mark stopped immediately (before the underlying runner actually exits)
    // so completion handlers can tell a requested stop apart from a failure.
    if (stopped && session.status === 'running') {
      this.updateSessionStatus(session, 'stopped');
      this.appendEvent(session, {
        timestamp: new Date().toISOString(),
        channel: 'system',
        message: 'Session stop requested.',
      });
    }
    return stopped;
  }
  /**
   * Validates model-switching support and updates both the live session and
   * the remembered preference used by subsequent resumes.
   */
  async setSessionModel(sessionId: string, model: string): Promise<void> {
    if (!this.capabilities.supportsModelSwitching) {
      throw new AppError(`Provider "${this.id}" does not support model switching.`, {
        code: 'MODEL_SWITCH_NOT_SUPPORTED',
        statusCode: 400,
      });
    }
    const trimmedModel = model.trim();
    if (!trimmedModel) {
      throw new AppError('Model cannot be empty.', {
        code: 'INVALID_MODEL',
        statusCode: 400,
      });
    }
    const session = this.sessions.get(sessionId);
    // Live sessions may expose a setModel hook; the preference is stored even
    // without a live session so the value applies on the next resume.
    if (session?.setModel) {
      await session.setModel(trimmedModel);
    }
    const currentPreference = this.sessionPreferences.get(sessionId) ?? {};
    this.sessionPreferences.set(sessionId, { ...currentPreference, model: trimmedModel });
    if (session) {
      session.model = trimmedModel;
      this.appendEvent(session, {
        timestamp: new Date().toISOString(),
        channel: 'system',
        message: `Model updated to "${trimmedModel}".`,
      });
    }
  }
  /**
   * Validates thinking-mode support and applies the update to the live session
   * and the remembered preference.
   */
  async setSessionThinkingMode(sessionId: string, thinkingMode: string): Promise<void> {
    if (!this.capabilities.supportsThinkingModeControl) {
      throw new AppError(`Provider "${this.id}" does not support thinking mode control.`, {
        code: 'THINKING_MODE_NOT_SUPPORTED',
        statusCode: 400,
      });
    }
    const trimmedMode = thinkingMode.trim();
    if (!trimmedMode) {
      throw new AppError('Thinking mode cannot be empty.', {
        code: 'INVALID_THINKING_MODE',
        statusCode: 400,
      });
    }
    const session = this.sessions.get(sessionId);
    if (session?.setThinkingMode) {
      await session.setThinkingMode(trimmedMode);
    }
    const currentPreference = this.sessionPreferences.get(sessionId) ?? {};
    this.sessionPreferences.set(sessionId, { ...currentPreference, thinkingMode: trimmedMode });
    if (session) {
      session.thinkingMode = trimmedMode;
      this.appendEvent(session, {
        timestamp: new Date().toISOString(),
        channel: 'system',
        message: `Thinking mode updated to "${trimmedMode}".`,
      });
    }
  }
  /**
   * Reads saved preferences for resumed sessions.
   */
  protected getSessionPreference(sessionId: string): SessionPreference {
    return this.sessionPreferences.get(sessionId) ?? {};
  }
  /**
   * Stores session preferences for subsequent resume/start operations.
   * Merges with any existing entry rather than replacing it.
   */
  protected rememberSessionPreference(sessionId: string, preference: SessionPreference): void {
    const currentPreference = this.sessionPreferences.get(sessionId) ?? {};
    this.sessionPreferences.set(sessionId, {
      ...currentPreference,
      ...preference,
    });
  }
  /**
   * Creates mutable internal session state and registers it in memory.
   * The record starts as 'running' with placeholder completion/stop handlers;
   * callers overwrite `completion` and `stop` once the runner is wired.
   */
  protected createSessionRecord(
    sessionId: string,
    input: {
      model?: string;
      thinkingMode?: string;
    },
  ): MutableProviderSession {
    const session: MutableProviderSession = {
      sessionId,
      provider: this.id,
      family: this.family,
      status: 'running',
      startedAt: new Date().toISOString(),
      model: input.model,
      thinkingMode: input.thinkingMode,
      events: [],
      completion: Promise.resolve(),
      stop: async () => false,
    };
    this.sessions.set(sessionId, session);
    this.rememberSessionPreference(sessionId, {
      model: input.model,
      thinkingMode: input.thinkingMode,
    });
    return session;
  }
  /**
   * Appends an event while enforcing the configured ring-buffer size.
   */
  protected appendEvent(session: MutableProviderSession, event: ProviderSessionEvent): void {
    session.events.push(event);
    if (session.events.length > MAX_EVENT_BUFFER_SIZE) {
      // Drop the oldest entries in place so existing references stay valid.
      session.events.splice(0, session.events.length - MAX_EVENT_BUFFER_SIZE);
    }
  }
  /**
   * Marks the terminal state for a session, stamping endedAt and an optional
   * error message.
   */
  protected updateSessionStatus(
    session: MutableProviderSession,
    status: MutableProviderSession['status'],
    error?: string,
  ): void {
    session.status = status;
    session.endedAt = new Date().toISOString();
    session.error = error;
  }
  /**
   * Converts mutable internal session state to an external snapshot.
   * The event list is shallow-copied so callers cannot mutate the buffer.
   */
  protected toSnapshot(session: MutableProviderSession): ProviderSessionSnapshot {
    return {
      sessionId: session.sessionId,
      provider: session.provider,
      family: session.family,
      status: session.status,
      startedAt: session.startedAt,
      endedAt: session.endedAt,
      model: session.model,
      thinkingMode: session.thinkingMode,
      events: [...session.events],
      error: session.error,
    };
  }
}

View File

@@ -0,0 +1,284 @@
import { randomUUID } from 'node:crypto';
import { once } from 'node:events';
import type { ChildProcessWithoutNullStreams } from 'node:child_process';
import spawn from 'cross-spawn';
import { AbstractProvider } from '@/modules/llm/providers/abstract.provider.js';
import type {
MutableProviderSession,
ProviderCapabilities,
ProviderSessionEvent,
ProviderSessionSnapshot,
StartSessionInput,
} from '@/modules/llm/providers/provider.interface.js';
import { createStreamLineAccumulator } from '@/shared/platform/stream.js';
import type { LLMProvider } from '@/shared/types/app.js';
// Input shape shared by the CLI start/resume execution paths.
type CreateCliInvocationInput = StartSessionInput & {
  sessionId: string;
  isResume: boolean;
};
// Description of how to spawn a provider's CLI for one session.
type CliInvocation = {
  command: string;
  args: string[];
  cwd?: string;
  env?: Record<string, string | undefined>;
};
// How long to wait after SIGTERM before escalating to SIGKILL.
const PROCESS_SHUTDOWN_GRACE_PERIOD_MS = 2_000;
/**
 * Base class for CLI-driven providers with streamed stdout/stderr parsing.
 * Concrete providers describe the command via createCliInvocation; this base
 * spawns the process, buffers line-based output, and manages termination.
 */
export abstract class BaseCliProvider extends AbstractProvider {
  protected constructor(providerId: LLMProvider, capabilities: ProviderCapabilities) {
    super(providerId, 'cli', capabilities);
  }
  /**
   * Starts a new CLI session (generating a session id when absent) and begins
   * process output streaming.
   */
  async launchSession(input: StartSessionInput): Promise<ProviderSessionSnapshot> {
    return this.startSessionInternal({
      ...input,
      sessionId: input.sessionId ?? randomUUID(),
      isResume: false,
    });
  }
  /**
   * Resumes an existing CLI session and begins process output streaming.
   */
  async resumeSession(input: StartSessionInput & { sessionId: string }): Promise<ProviderSessionSnapshot> {
    return this.startSessionInternal({
      ...input,
      isResume: true,
    });
  }
  /**
   * Implemented by concrete CLI providers to describe command invocation.
   */
  protected abstract createCliInvocation(input: CreateCliInvocationInput): CliInvocation;
  /**
   * Maps one stdout/stderr line into either JSON or plain-text event shapes.
   * Lines that parse as JSON are emitted on the 'json' channel.
   */
  protected mapCliOutputLine(line: string, channel: 'stdout' | 'stderr'): ProviderSessionEvent {
    const parsedJson = this.tryParseJson(line);
    if (parsedJson !== null) {
      return {
        timestamp: new Date().toISOString(),
        channel: 'json',
        data: parsedJson,
      };
    }
    return {
      timestamp: new Date().toISOString(),
      channel,
      message: line,
    };
  }
  /**
   * Runs a one-off CLI command and returns full stdout text on success.
   * Throws when the process fails to spawn or exits non-zero; the thrown
   * message prefers captured stderr.
   */
  protected async runCommandForOutput(command: string, args: string[]): Promise<string> {
    const child = spawn(command, args, {
      stdio: ['ignore', 'pipe', 'pipe'],
      cwd: process.cwd(),
      env: process.env,
    });
    let stdout = '';
    let stderr = '';
    child.stdout?.on('data', (chunk) => {
      stdout += chunk.toString();
    });
    child.stderr?.on('data', (chunk) => {
      stderr += chunk.toString();
    });
    // Both promises are passed to race, so either rejection path is handled;
    // spawn failures ('error' event) surface as a thrown Error here.
    const closePromise = once(child, 'close');
    const errorPromise = once(child, 'error').then(([error]) => {
      throw error;
    });
    await Promise.race([closePromise, errorPromise]);
    if ((child.exitCode ?? 1) !== 0) {
      const message = stderr.trim() || `Command "${command}" failed with code ${child.exitCode}`;
      throw new Error(message);
    }
    return stdout;
  }
  /**
   * Boots one CLI child process and wires stream handlers to the session buffer.
   * Explicit input values win over remembered per-session preferences.
   */
  private async startSessionInternal(input: CreateCliInvocationInput): Promise<ProviderSessionSnapshot> {
    const preferred = this.getSessionPreference(input.sessionId);
    const effectiveModel = input.model ?? preferred.model;
    const effectiveThinking = input.thinkingMode ?? preferred.thinkingMode;
    const session = this.createSessionRecord(input.sessionId, {
      model: effectiveModel,
      thinkingMode: effectiveThinking,
    });
    const invocation = this.createCliInvocation({
      ...input,
      model: effectiveModel,
      thinkingMode: effectiveThinking,
    });
    const child = spawn(invocation.command, invocation.args, {
      cwd: invocation.cwd ?? input.workspacePath ?? process.cwd(),
      env: {
        ...process.env,
        ...invocation.env,
      },
      stdio: ['ignore', 'pipe', 'pipe'],
    }) as ChildProcessWithoutNullStreams;
    const stop = async (): Promise<boolean> => this.terminateChildProcess(child);
    session.stop = stop;
    const stdoutAccumulator = createStreamLineAccumulator({ preserveEmptyLines: false });
    const stderrAccumulator = createStreamLineAccumulator({ preserveEmptyLines: false });
    child.stdout.on('data', (chunk) => {
      const lines = stdoutAccumulator.push(chunk);
      for (const line of lines) {
        const event = this.mapCliOutputLine(line, 'stdout');
        this.appendEvent(session, event);
      }
    });
    child.stderr.on('data', (chunk) => {
      const lines = stderrAccumulator.push(chunk);
      for (const line of lines) {
        const event = this.mapCliOutputLine(line, 'stderr');
        this.appendEvent(session, event);
      }
    });
    session.completion = this.waitForCliProcess(
      session,
      child,
      stdoutAccumulator,
      stderrAccumulator,
    );
    return this.toSnapshot(session);
  }
  /**
   * Waits for process completion/error and marks the final session status.
   *
   * BUGFIX: `once(child, 'close')` REJECTS when the child emits 'error' before
   * 'close' (Node's events.once gives 'error' special handling while waiting
   * for another event, e.g. on spawn ENOENT, where 'close' may never fire).
   * Previously that rejection could win the race and make `session.completion`
   * a rejected — and typically unawaited — promise, triggering an unhandled
   * rejection and leaving the session stuck as 'running'. The rejection is now
   * mapped into the error branch so completion always resolves.
   */
  private async waitForCliProcess(
    session: MutableProviderSession,
    child: ChildProcessWithoutNullStreams,
    stdoutAccumulator: { flush: () => string[] },
    stderrAccumulator: { flush: () => string[] },
  ): Promise<void> {
    const closePromise = once(child, 'close') as Promise<[number | null, NodeJS.Signals | null]>;
    const errorPromise = once(child, 'error') as Promise<[Error]>;
    const raceResult = await Promise.race([
      closePromise.then(
        (result) => ({ type: 'close' as const, result }),
        (error: unknown) => ({
          type: 'error' as const,
          result: [error instanceof Error ? error : new Error(String(error))] as [Error],
        }),
      ),
      errorPromise.then((result) => ({ type: 'error' as const, result })),
    ]);
    // Flush any partial trailing lines the accumulators are still holding.
    const pendingStdout = stdoutAccumulator.flush();
    const pendingStderr = stderrAccumulator.flush();
    for (const line of pendingStdout) {
      this.appendEvent(session, this.mapCliOutputLine(line, 'stdout'));
    }
    for (const line of pendingStderr) {
      this.appendEvent(session, this.mapCliOutputLine(line, 'stderr'));
    }
    if (raceResult.type === 'error') {
      const [error] = raceResult.result;
      const message = error.message || 'CLI process failed before start.';
      this.updateSessionStatus(session, 'failed', message);
      this.appendEvent(session, {
        timestamp: new Date().toISOString(),
        channel: 'error',
        message,
      });
      return;
    }
    const [code, signal] = raceResult.result;
    // A requested stop was already marked by stopSession; keep that status.
    if (session.status === 'stopped') {
      this.appendEvent(session, {
        timestamp: new Date().toISOString(),
        channel: 'system',
        message: `Session stopped (${signal ?? 'SIGTERM'}).`,
      });
      return;
    }
    if (code === 0) {
      this.updateSessionStatus(session, 'completed');
      return;
    }
    const message = `CLI command exited with code ${code ?? 'null'}${signal ? ` (signal: ${signal})` : ''}`;
    this.updateSessionStatus(session, 'failed', message);
    this.appendEvent(session, {
      timestamp: new Date().toISOString(),
      channel: 'error',
      message,
    });
  }
  /**
   * Attempts graceful termination first, then force-kills when necessary.
   * Returns false only when signaling the process itself threw.
   */
  private async terminateChildProcess(child: ChildProcessWithoutNullStreams): Promise<boolean> {
    if (child.killed || child.exitCode !== null) {
      return true;
    }
    try {
      child.kill('SIGTERM');
      await Promise.race([
        once(child, 'close'),
        new Promise((resolve) => setTimeout(resolve, PROCESS_SHUTDOWN_GRACE_PERIOD_MS)),
      ]);
      if (child.exitCode === null) {
        child.kill('SIGKILL');
      }
      return true;
    } catch {
      return false;
    }
  }
  /**
   * Best-effort JSON parser for stream-json providers. Returns null for
   * anything that is not a complete JSON object/array literal.
   */
  private tryParseJson(line: string): unknown | null {
    const trimmed = line.trim();
    if (!trimmed || (!trimmed.startsWith('{') && !trimmed.startsWith('['))) {
      return null;
    }
    try {
      return JSON.parse(trimmed);
    } catch {
      return null;
    }
  }
}

View File

@@ -0,0 +1,147 @@
import { randomUUID } from 'node:crypto';
import { AbstractProvider } from '@/modules/llm/providers/abstract.provider.js';
import type {
MutableProviderSession,
ProviderCapabilities,
ProviderSessionEvent,
ProviderSessionSnapshot,
StartSessionInput,
} from '@/modules/llm/providers/provider.interface.js';
import type { LLMProvider } from '@/shared/types/app.js';
// Input shape shared by the SDK start/resume execution paths.
type CreateSdkExecutionInput = StartSessionInput & {
  sessionId: string;
  isResume: boolean;
};
// Handle returned by concrete providers: the event stream plus control hooks.
type SdkExecution = {
  stream: AsyncIterable<unknown>;
  stop: () => Promise<boolean>;
  setModel?: (model: string) => Promise<void>;
  setThinkingMode?: (thinkingMode: string) => Promise<void>;
};
/**
 * Base class for SDK-driven providers with async stream consumption.
 * Concrete providers create the execution via createSdkExecution; this base
 * drains its event stream into the session buffer and tracks final status.
 */
export abstract class BaseSdkProvider extends AbstractProvider {
  protected constructor(providerId: LLMProvider, capabilities: ProviderCapabilities) {
    super(providerId, 'sdk', capabilities);
  }
  /**
   * Starts a new SDK session (generating a session id when absent) and begins
   * event streaming.
   */
  async launchSession(input: StartSessionInput): Promise<ProviderSessionSnapshot> {
    return this.startSessionInternal({
      ...input,
      sessionId: input.sessionId ?? randomUUID(),
      isResume: false,
    });
  }
  /**
   * Resumes an existing SDK session and begins event streaming.
   */
  async resumeSession(input: StartSessionInput & { sessionId: string }): Promise<ProviderSessionSnapshot> {
    return this.startSessionInternal({
      ...input,
      isResume: true,
    });
  }
  /**
   * Implemented by concrete SDK providers to create a running execution.
   */
  protected abstract createSdkExecution(input: CreateSdkExecutionInput): Promise<SdkExecution>;
  /**
   * Normalizes raw SDK events to the shared event shape.
   * Subclasses may override to emit richer metadata, or return null to drop
   * an event.
   */
  protected mapSdkEvent(rawEvent: unknown): ProviderSessionEvent | null {
    return {
      timestamp: new Date().toISOString(),
      channel: 'sdk',
      data: rawEvent,
    };
  }
  /**
   * Initializes one SDK execution and wires it to the internal session record.
   * Explicit input values win over remembered per-session preferences.
   */
  private async startSessionInternal(input: CreateSdkExecutionInput): Promise<ProviderSessionSnapshot> {
    const preferred = this.getSessionPreference(input.sessionId);
    const effectiveModel = input.model ?? preferred.model;
    const effectiveThinking = input.thinkingMode ?? preferred.thinkingMode;
    const session = this.createSessionRecord(input.sessionId, {
      model: effectiveModel,
      thinkingMode: effectiveThinking,
    });
    let execution: SdkExecution;
    try {
      execution = await this.createSdkExecution({
        ...input,
        model: effectiveModel,
        thinkingMode: effectiveThinking,
      });
    } catch (error) {
      // Startup failure: mark the registered record failed, then rethrow so
      // the caller sees the original error.
      const message = error instanceof Error ? error.message : 'Failed to start SDK session';
      this.updateSessionStatus(session, 'failed', message);
      this.appendEvent(session, {
        timestamp: new Date().toISOString(),
        channel: 'error',
        message,
      });
      throw error;
    }
    // Attach control hooks before consumption starts so stop/setModel work
    // while the stream is being drained.
    session.stop = execution.stop;
    session.setModel = execution.setModel;
    session.setThinkingMode = execution.setThinkingMode;
    session.completion = this.consumeStream(session, execution.stream);
    return this.toSnapshot(session);
  }
  /**
   * Drains SDK events until completion/error and updates the final status.
   * Never rejects: failures are recorded on the session instead.
   */
  private async consumeStream(
    session: MutableProviderSession,
    stream: AsyncIterable<unknown>,
  ): Promise<void> {
    try {
      for await (const sdkEvent of stream) {
        const normalized = this.mapSdkEvent(sdkEvent);
        if (normalized) {
          this.appendEvent(session, normalized);
        }
      }
      // Only promote to 'completed' if nothing else (e.g. a stop) already
      // changed the status.
      if (session.status === 'running') {
        this.updateSessionStatus(session, 'completed');
      }
    } catch (error) {
      const message = error instanceof Error ? error.message : 'Unknown SDK execution failure';
      // A requested stop may surface as a stream abort; keep the 'stopped'
      // status rather than flagging a failure.
      if (session.status === 'stopped') {
        this.appendEvent(session, {
          timestamp: new Date().toISOString(),
          channel: 'system',
          message: 'Session stopped.',
        });
        return;
      }
      this.updateSessionStatus(session, 'failed', message);
      this.appendEvent(session, {
        timestamp: new Date().toISOString(),
        channel: 'error',
        message,
      });
    }
  }
}

View File

@@ -0,0 +1,182 @@
import {
query,
type CanUseTool,
type ModelInfo,
type Options,
} from '@anthropic-ai/claude-agent-sdk';
import { BaseSdkProvider } from '@/modules/llm/providers/base-sdk.provider.js';
import type {
ProviderModel,
ProviderSessionEvent,
RuntimePermissionMode,
StartSessionInput,
} from '@/modules/llm/providers/provider.interface.js';
// Input shape shared by the Claude start/resume execution paths.
type ClaudeExecutionInput = StartSessionInput & {
  sessionId: string;
  isResume: boolean;
};
// Recognized thinking-effort level names. NOTE(review): assumed to back
// resolveClaudeEffort's validation (defined later in this file) — confirm.
const CLAUDE_THINKING_LEVELS = new Set(['low', 'medium', 'high', 'max']);
/**
* Claude SDK provider implementation.
*/
export class ClaudeProvider extends BaseSdkProvider {
constructor() {
super('claude', {
supportsRuntimePermissionRequests: true,
supportsThinkingModeControl: true,
supportsModelSwitching: true,
supportsSessionResume: true,
supportsSessionStop: true,
});
}
/**
* Retrieves available Claude models from the SDK.
*/
async listModels(): Promise<ProviderModel[]> {
const probe = query({
prompt: 'model_probe',
options: {
permissionMode: 'plan',
},
});
try {
const models = await probe.supportedModels();
return models.map((model) => this.mapModelInfo(model));
} finally {
probe.close();
}
}
/**
* Creates a Claude SDK query execution for start/resume flows.
*/
protected async createSdkExecution(input: ClaudeExecutionInput): Promise<{
stream: AsyncIterable<unknown>;
stop: () => Promise<boolean>;
setModel: (model: string) => Promise<void>;
}> {
const options: Options = {
cwd: input.workspacePath,
model: input.model,
effort: this.resolveClaudeEffort(input.thinkingMode),
canUseTool: this.resolvePermissionHandler(input.runtimePermissionMode),
};
if (input.isResume) {
options.resume = input.sessionId;
} else {
options.sessionId = input.sessionId;
}
const queryInstance = query({
prompt: input.prompt,
options,
});
return {
stream: queryInstance,
stop: async () => {
await queryInstance.interrupt();
return true;
},
setModel: async (model: string) => {
await queryInstance.setModel(model);
},
};
}
/**
* Produces compact event metadata for frontend stream rendering.
*/
protected mapSdkEvent(rawEvent: unknown): ProviderSessionEvent | null {
if (typeof rawEvent !== 'object' || rawEvent === null) {
return {
timestamp: new Date().toISOString(),
channel: 'sdk',
message: String(rawEvent),
};
}
const messageType = this.getStringProperty(rawEvent, 'type');
const messageSubtype = this.getStringProperty(rawEvent, 'subtype');
const message = [messageType, messageSubtype].filter(Boolean).join(':') || 'claude_event';
return {
timestamp: new Date().toISOString(),
channel: 'sdk',
message,
data: rawEvent,
};
}
/**
* Normalizes Claude model metadata to the shared model shape.
*/
private mapModelInfo(model: ModelInfo): ProviderModel {
return {
value: model.value,
displayName: model.displayName,
description: model.description,
supportsThinkingModes: Boolean(model.supportsEffort),
supportedThinkingModes: model.supportedEffortLevels,
};
}
/**
* Maps requested thinking mode to Claude effort levels.
*/
private resolveClaudeEffort(thinkingMode?: string): Options['effort'] {
if (!thinkingMode) {
return 'high';
}
const normalized = thinkingMode.trim().toLowerCase();
if (CLAUDE_THINKING_LEVELS.has(normalized)) {
return normalized as Options['effort'];
}
return 'high';
}
/**
* Builds a runtime permission callback when explicit allow/deny is requested.
*/
private resolvePermissionHandler(mode?: RuntimePermissionMode): CanUseTool | undefined {
if (!mode || mode === 'ask') {
return undefined;
}
if (mode === 'allow') {
return async () => ({ behavior: 'allow' });
}
return async () => ({
behavior: 'deny',
message: 'Permission denied by runtime permission mode.',
interrupt: false,
});
}
/**
* Reads one optional string property from an unknown event object.
*/
private getStringProperty(value: unknown, key: string): string | undefined {
if (!value || typeof value !== 'object') {
return undefined;
}
const record = value as Record<string, unknown>;
const rawValue = record[key];
if (typeof rawValue !== 'string') {
return undefined;
}
return rawValue;
}
}

View File

@@ -0,0 +1,171 @@
import os from 'node:os';
import path from 'node:path';
import { readFile } from 'node:fs/promises';
import { BaseSdkProvider } from '@/modules/llm/providers/base-sdk.provider.js';
import type { ProviderModel, ProviderSessionEvent, StartSessionInput } from '@/modules/llm/providers/provider.interface.js';
import { AppError } from '@/shared/utils/app-error.js';
type CodexExecutionInput = StartSessionInput & {
  sessionId: string;
  isResume: boolean;
};
type CodexModelCacheEntry = {
  slug?: string;
  display_name?: string;
  description?: string;
  supported_reasoning_levels?: Array<{
    effort?: string;
    description?: string;
  }>;
  priority?: number;
};
type CodexSdkClient = {
  startThread: (options?: Record<string, unknown>) => CodexThread;
  resumeThread: (sessionId: string, options?: Record<string, unknown>) => CodexThread;
};
type CodexThread = {
  runStreamed: (
    prompt: string,
    options?: {
      signal?: AbortSignal;
    },
  ) => Promise<{
    events: AsyncIterable<unknown>;
  }>;
};
type CodexSdkModule = {
  Codex: new () => CodexSdkClient;
};
/**
 * Codex SDK provider implementation.
 */
export class CodexProvider extends BaseSdkProvider {
  constructor() {
    super('codex', {
      supportsRuntimePermissionRequests: false,
      supportsThinkingModeControl: true,
      supportsModelSwitching: true,
      supportsSessionResume: true,
      supportsSessionStop: true,
    });
  }
  /**
   * Loads the Codex model list from the on-disk cache at
   * ~/.codex/models_cache.json and maps it to the shared model shape.
   */
  async listModels(): Promise<ProviderModel[]> {
    const cachePath = path.join(os.homedir(), '.codex', 'models_cache.json');
    let rawCache: string;
    try {
      rawCache = await readFile(cachePath, 'utf8');
    } catch (error) {
      if ((error as NodeJS.ErrnoException)?.code === 'ENOENT') {
        throw new AppError('Codex model cache was not found. Expected ~/.codex/models_cache.json.', {
          code: 'CODEX_MODEL_CACHE_NOT_FOUND',
          statusCode: 404,
        });
      }
      throw error;
    }
    const cacheJson = JSON.parse(rawCache) as { models?: CodexModelCacheEntry[] };
    const models: ProviderModel[] = [];
    for (const entry of cacheJson.models ?? []) {
      if (!entry.slug) {
        continue;
      }
      models.push({
        value: entry.slug,
        displayName: entry.display_name ?? entry.slug ?? 'unknown',
        description: entry.description,
        default: entry.priority === 1,
        supportsThinkingModes: Boolean(entry.supported_reasoning_levels?.length),
        supportedThinkingModes: entry.supported_reasoning_levels
          ?.map((level) => level.effort)
          .filter((effort): effort is string => typeof effort === 'string'),
      });
    }
    return models;
  }
  /**
   * Starts (or resumes) a Codex thread and returns its event stream together
   * with an abort-backed stop handle.
   */
  protected async createSdkExecution(input: CodexExecutionInput): Promise<{
    stream: AsyncIterable<unknown>;
    stop: () => Promise<boolean>;
  }> {
    const codexModule = await this.loadCodexSdkModule();
    const client = new codexModule.Codex();
    const threadOptions: Record<string, unknown> = {
      model: input.model,
      workingDirectory: input.workspacePath,
      modelReasoningEffort: input.thinkingMode,
    };
    let thread: CodexThread;
    if (input.isResume) {
      thread = client.resumeThread(input.sessionId, threadOptions);
    } else {
      thread = client.startThread(threadOptions);
    }
    const abort = new AbortController();
    const turn = await thread.runStreamed(input.prompt, { signal: abort.signal });
    return {
      stream: turn.events,
      stop: async () => {
        abort.abort('Session stop requested');
        return true;
      },
    };
  }
  /**
   * Normalizes Codex stream events into the shared event shape; non-object
   * events are stringified, object events keep their raw payload in `data`.
   */
  protected mapSdkEvent(rawEvent: unknown): ProviderSessionEvent | null {
    const timestamp = new Date().toISOString();
    if (rawEvent === null || typeof rawEvent !== 'object') {
      return { timestamp, channel: 'sdk', message: String(rawEvent) };
    }
    const eventType = (rawEvent as Record<string, unknown>).type;
    return {
      timestamp,
      channel: 'sdk',
      message: typeof eventType === 'string' ? eventType : 'codex_event',
      data: rawEvent,
    };
  }
  /**
   * Dynamically imports the Codex SDK so environments without it still boot;
   * failures surface as a 503 AppError instead of a raw import crash.
   */
  private async loadCodexSdkModule(): Promise<CodexSdkModule> {
    try {
      const imported = (await import('@openai/codex-sdk')) as unknown as CodexSdkModule;
      if (!imported?.Codex) {
        throw new Error('Codex SDK did not export "Codex".');
      }
      return imported;
    } catch (error) {
      const reason = error instanceof Error ? error.message : 'Failed to import Codex SDK';
      throw new AppError(`Codex SDK is unavailable: ${reason}`, {
        code: 'CODEX_SDK_UNAVAILABLE',
        statusCode: 503,
      });
    }
  }
}

View File

@@ -0,0 +1,123 @@
import { BaseCliProvider } from '@/modules/llm/providers/base-cli.provider.js';
import type { ProviderModel, StartSessionInput } from '@/modules/llm/providers/provider.interface.js';
type CursorExecutionInput = StartSessionInput & {
  sessionId: string;
  isResume: boolean;
};
const ANSI_REGEX =
  // biome-ignore lint/suspicious/noControlCharactersInRegex: ANSI escape stripping.
  /\u001b\[[0-9;]*m/g;
/**
 * Cursor CLI provider implementation.
 */
export class CursorProvider extends BaseCliProvider {
  constructor() {
    super('cursor', {
      supportsRuntimePermissionRequests: false,
      supportsThinkingModeControl: false,
      supportsModelSwitching: true,
      supportsSessionResume: true,
      supportsSessionStop: true,
    });
  }
  /**
   * Discovers models by running `cursor-agent --list-models` and parsing its output.
   */
  async listModels(): Promise<ProviderModel[]> {
    const rawOutput = await this.runCommandForOutput('cursor-agent', ['--list-models']);
    return this.parseModelsOutput(rawOutput);
  }
  /**
   * Assembles the cursor-agent command line for start/resume flows.
   */
  protected createCliInvocation(input: CursorExecutionInput): {
    command: string;
    args: string[];
    cwd?: string;
  } {
    const cliArgs = ['--print', '--trust', '--output-format', 'stream-json'];
    if (input.allowYolo) {
      cliArgs.push('--yolo');
    }
    if (input.model) {
      cliArgs.push('--model', input.model);
    }
    if (input.isResume) {
      cliArgs.push('--resume', input.sessionId);
    }
    // The prompt is always the final positional argument.
    cliArgs.push(input.prompt);
    return {
      command: 'cursor-agent',
      args: cliArgs,
      cwd: input.workspacePath,
    };
  }
  /**
   * Strips ANSI codes, splits the CLI output into lines, and keeps every line
   * that parses as a model entry.
   */
  private parseModelsOutput(output: string): ProviderModel[] {
    return output
      .replace(ANSI_REGEX, '')
      .split(/\r?\n/)
      .map((line) => this.parseModelLine(line))
      .filter((model): model is ProviderModel => model !== null);
  }
  /**
   * Parses a single `<model> - <description>` line, skipping headers and tips.
   */
  private parseModelLine(line: string): ProviderModel | null {
    const text = line.trim();
    const isNoise =
      !text ||
      text === 'Available models' ||
      text.startsWith('Loading models') ||
      text.startsWith('Tip:');
    if (isNoise) {
      return null;
    }
    const parts = text.match(/^(.+?)\s+-\s+(.+)$/);
    if (!parts) {
      return null;
    }
    const value = parts[1].trim();
    const annotatedDescription = parts[2].trim();
    // "(current)"/"(default)" markers are flags, not part of the description.
    return {
      value,
      displayName: value,
      description: annotatedDescription
        .replace(/\s*\((current|default)\)/gi, '')
        .replace(/\s{2,}/g, ' ')
        .trim(),
      current: /\(current\)/i.test(annotatedDescription),
      default: /\(default\)/i.test(annotatedDescription),
      supportsThinkingModes: false,
      supportedThinkingModes: [],
    };
  }
}

View File

@@ -0,0 +1,66 @@
import { BaseCliProvider } from '@/modules/llm/providers/base-cli.provider.js';
import type { ProviderModel, StartSessionInput } from '@/modules/llm/providers/provider.interface.js';
type GeminiExecutionInput = StartSessionInput & {
  sessionId: string;
  isResume: boolean;
};
const GEMINI_MODELS: ProviderModel[] = [
  { value: 'gemini-3.1-pro-preview', displayName: 'Gemini 3.1 Pro Preview' },
  { value: 'gemini-3-pro-preview', displayName: 'Gemini 3 Pro Preview' },
  { value: 'gemini-3-flash-preview', displayName: 'Gemini 3 Flash Preview' },
  { value: 'gemini-2.5-flash', displayName: 'Gemini 2.5 Flash' },
  { value: 'gemini-2.5-pro', displayName: 'Gemini 2.5 Pro' },
  { value: 'gemini-2.0-flash-lite', displayName: 'Gemini 2.0 Flash Lite' },
  { value: 'gemini-2.0-flash', displayName: 'Gemini 2.0 Flash' },
  { value: 'gemini-2.0-pro-exp', displayName: 'Gemini 2.0 Pro Experimental' },
  { value: 'gemini-2.0-flash-thinking-exp', displayName: 'Gemini 2.0 Flash Thinking' },
];
/**
 * Gemini CLI provider implementation.
 */
export class GeminiProvider extends BaseCliProvider {
  constructor() {
    super('gemini', {
      supportsRuntimePermissionRequests: false,
      supportsThinkingModeControl: false,
      supportsModelSwitching: true,
      supportsSessionResume: true,
      supportsSessionStop: true,
    });
  }
  /**
   * Returns the curated, static Gemini model catalog.
   */
  async listModels(): Promise<ProviderModel[]> {
    return GEMINI_MODELS;
  }
  /**
   * Assembles the gemini command line for start/resume flows.
   */
  protected createCliInvocation(input: GeminiExecutionInput): {
    command: string;
    args: string[];
    cwd?: string;
  } {
    const modelArgs = input.model ? ['--model', input.model] : [];
    const resumeArgs = input.isResume ? ['--resume', input.sessionId] : [];
    return {
      command: 'gemini',
      args: [
        '--prompt',
        input.prompt,
        '--output-format',
        'stream-json',
        ...modelArgs,
        ...resumeArgs,
      ],
      cwd: input.workspacePath,
    };
  }
}

View File

@@ -0,0 +1,103 @@
import type { LLMProvider } from '@/shared/types/app.js';
/** Distinguishes providers driven through an SDK from those shelling out to a CLI. */
export type ProviderExecutionFamily = 'sdk' | 'cli';
/** Lifecycle states a provider session can report. */
export type ProviderSessionStatus = 'running' | 'completed' | 'failed' | 'stopped';
/** How runtime tool-permission requests are answered. */
export type RuntimePermissionMode = 'ask' | 'allow' | 'deny';
/**
 * Advertises optional provider behaviors so route/service code can gate features.
 */
export type ProviderCapabilities = {
  supportsRuntimePermissionRequests: boolean;
  supportsThinkingModeControl: boolean;
  supportsModelSwitching: boolean;
  supportsSessionResume: boolean;
  supportsSessionStop: boolean;
};
/**
 * Provider model descriptor normalized for frontend consumption.
 */
export type ProviderModel = {
  /** Stable identifier passed back to the provider when selecting this model. */
  value: string;
  displayName: string;
  description?: string;
  /** True when the provider marks this model as its default. */
  default?: boolean;
  /** True when the provider reports this model as currently selected. */
  current?: boolean;
  supportsThinkingModes?: boolean;
  supportedThinkingModes?: string[];
};
/**
 * Unified in-memory event emitted while a provider session runs.
 */
export type ProviderSessionEvent = {
  /** ISO-8601 time at which the event was recorded. */
  timestamp: string;
  /** Origin of the event: provider stream, process pipe, or internal status. */
  channel: 'sdk' | 'stdout' | 'stderr' | 'json' | 'system' | 'error';
  message?: string;
  /** Raw provider payload, when one exists, for richer frontend rendering. */
  data?: unknown;
};
/**
 * Common launch/resume payload consumed by all providers.
 */
export type StartSessionInput = {
  prompt: string;
  workspacePath?: string;
  sessionId?: string;
  model?: string;
  thinkingMode?: string;
  runtimePermissionMode?: RuntimePermissionMode;
  /** Opt-in flag some CLIs map to their unattended "yolo" mode. */
  allowYolo?: boolean;
};
/**
 * Snapshot shape exposed externally for a provider session.
 */
export type ProviderSessionSnapshot = {
  sessionId: string;
  provider: LLMProvider;
  family: ProviderExecutionFamily;
  status: ProviderSessionStatus;
  startedAt: string;
  endedAt?: string;
  model?: string;
  thinkingMode?: string;
  events: ProviderSessionEvent[];
  /** Failure message when status is 'failed'. */
  error?: string;
};
/**
 * Provider contract that both SDK and CLI families implement.
 */
export interface IProvider {
  readonly id: LLMProvider;
  readonly family: ProviderExecutionFamily;
  readonly capabilities: ProviderCapabilities;
  listModels(): Promise<ProviderModel[]>;
  launchSession(input: StartSessionInput): Promise<ProviderSessionSnapshot>;
  resumeSession(input: StartSessionInput & { sessionId: string }): Promise<ProviderSessionSnapshot>;
  stopSession(sessionId: string): Promise<boolean>;
  setSessionModel(sessionId: string, model: string): Promise<void>;
  setSessionThinkingMode(sessionId: string, thinkingMode: string): Promise<void>;
  getSession(sessionId: string): ProviderSessionSnapshot | null;
  listSessions(): ProviderSessionSnapshot[];
  waitForSession(sessionId: string): Promise<ProviderSessionSnapshot | null>;
}
/**
 * Internal mutable session state used by provider base classes.
 * Extends the public snapshot with the live completion promise and the
 * stop/setModel/setThinkingMode handles wired to the running execution.
 */
export type MutableProviderSession = Omit<ProviderSessionSnapshot, 'events'> & {
  events: ProviderSessionEvent[];
  completion: Promise<void>;
  stop: () => Promise<boolean>;
  setModel?: (model: string) => Promise<void>;
  setThinkingMode?: (thinkingMode: string) => Promise<void>;
};

View File

@@ -0,0 +1,84 @@
import os from 'node:os';
import path from 'node:path';
import { sessionsDb } from '@/shared/database/repositories/sessions.db.js';
import {
buildLookupMap,
extractFirstValidJsonlData,
findFilesRecursivelyCreatedAfter,
normalizeSessionName,
readFileTimestamps,
} from '@/modules/llm/session-indexers/session-indexer.utils.js';
import type { ISessionIndexer } from '@/modules/llm/session-indexers/session-indexer.interface.js';
type ParsedSession = {
sessionId: string;
workspacePath: string;
sessionName?: string;
};
/**
* Session indexer for Claude transcript artifacts.
*/
export class ClaudeSessionIndexer implements ISessionIndexer {
readonly provider = 'claude' as const;
/**
* Scans ~/.claude projects and upserts discovered sessions into DB.
*/
async synchronize(lastScanAt: Date | null): Promise<number> {
const claudeHome = path.join(os.homedir(), '.claude');
const nameMap = await buildLookupMap(path.join(claudeHome, 'history.jsonl'), 'sessionId', 'display');
const files = await findFilesRecursivelyCreatedAfter(
path.join(claudeHome, 'projects'),
'.jsonl',
lastScanAt,
);
let processed = 0;
for (const filePath of files) {
const parsed = await this.processSessionFile(filePath, nameMap);
if (!parsed) {
continue;
}
const timestamps = await readFileTimestamps(filePath);
sessionsDb.createSession(
parsed.sessionId,
this.provider,
parsed.workspacePath,
parsed.sessionName,
timestamps.createdAt,
timestamps.updatedAt,
filePath,
);
processed += 1;
}
return processed;
}
/**
* Extracts session metadata from one Claude JSONL session file.
*/
private async processSessionFile(
filePath: string,
nameMap: Map<string, string>,
): Promise<ParsedSession | null> {
return extractFirstValidJsonlData(filePath, (rawData) => {
const data = rawData as Record<string, unknown>;
const sessionId = typeof data.sessionId === 'string' ? data.sessionId : undefined;
const workspacePath = typeof data.cwd === 'string' ? data.cwd : undefined;
if (!sessionId || !workspacePath) {
return null;
}
return {
sessionId,
workspacePath,
sessionName: normalizeSessionName(nameMap.get(sessionId), 'Untitled Claude Session'),
};
});
}
}

View File

@@ -0,0 +1,85 @@
import os from 'node:os';
import path from 'node:path';
import { sessionsDb } from '@/shared/database/repositories/sessions.db.js';
import {
buildLookupMap,
extractFirstValidJsonlData,
findFilesRecursivelyCreatedAfter,
normalizeSessionName,
readFileTimestamps,
} from '@/modules/llm/session-indexers/session-indexer.utils.js';
import type { ISessionIndexer } from '@/modules/llm/session-indexers/session-indexer.interface.js';
type ParsedSession = {
sessionId: string;
workspacePath: string;
sessionName?: string;
};
/**
* Session indexer for Codex transcript artifacts.
*/
export class CodexSessionIndexer implements ISessionIndexer {
readonly provider = 'codex' as const;
/**
* Scans ~/.codex sessions and upserts discovered sessions into DB.
*/
async synchronize(lastScanAt: Date | null): Promise<number> {
const codexHome = path.join(os.homedir(), '.codex');
const nameMap = await buildLookupMap(path.join(codexHome, 'session_index.jsonl'), 'id', 'thread_name');
const files = await findFilesRecursivelyCreatedAfter(
path.join(codexHome, 'sessions'),
'.jsonl',
lastScanAt,
);
let processed = 0;
for (const filePath of files) {
const parsed = await this.processSessionFile(filePath, nameMap);
if (!parsed) {
continue;
}
const timestamps = await readFileTimestamps(filePath);
sessionsDb.createSession(
parsed.sessionId,
this.provider,
parsed.workspacePath,
parsed.sessionName,
timestamps.createdAt,
timestamps.updatedAt,
filePath,
);
processed += 1;
}
return processed;
}
/**
* Extracts session metadata from one Codex JSONL session file.
*/
private async processSessionFile(
filePath: string,
nameMap: Map<string, string>,
): Promise<ParsedSession | null> {
return extractFirstValidJsonlData(filePath, (rawData) => {
const data = rawData as Record<string, unknown>;
const payload = data.payload as Record<string, unknown> | undefined;
const sessionId = typeof payload?.id === 'string' ? payload.id : undefined;
const workspacePath = typeof payload?.cwd === 'string' ? payload.cwd : undefined;
if (!sessionId || !workspacePath) {
return null;
}
return {
sessionId,
workspacePath,
sessionName: normalizeSessionName(nameMap.get(sessionId), 'Untitled Codex Session'),
};
});
}
}

View File

@@ -0,0 +1,138 @@
import crypto from 'node:crypto';
import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
import readline from 'node:readline';
import { sessionsDb } from '@/shared/database/repositories/sessions.db.js';
import {
extractFirstValidJsonlData,
findFilesRecursivelyCreatedAfter,
listDirectoryEntriesSafe,
normalizeSessionName,
readFileTimestamps,
} from '@/modules/llm/session-indexers/session-indexer.utils.js';
import type { ISessionIndexer } from '@/modules/llm/session-indexers/session-indexer.interface.js';
type ParsedSession = {
  sessionId: string;
  workspacePath: string;
  sessionName?: string;
};
/**
 * Session indexer for Cursor transcript artifacts.
 *
 * Workspace paths are recovered from each project's worker.log, then md5-hashed
 * to locate the matching chat transcripts under ~/.cursor/chats/<hash>/.
 */
export class CursorSessionIndexer implements ISessionIndexer {
  readonly provider = 'cursor' as const;
  /**
   * Scans Cursor chats and upserts discovered sessions into DB.
   *
   * @param lastScanAt - Only chat files created after this time are processed; null scans all.
   * @returns Number of session files upserted in this pass.
   */
  async synchronize(lastScanAt: Date | null): Promise<number> {
    const cursorHome = path.join(os.homedir(), '.cursor');
    const projectsDir = path.join(cursorHome, 'projects');
    const projectEntries = await listDirectoryEntriesSafe(projectsDir);
    // Several project dirs can resolve to the same workspace; index each workspace once.
    const seenWorkspacePaths = new Set<string>();
    let processed = 0;
    for (const entry of projectEntries) {
      if (!entry.isDirectory()) {
        continue;
      }
      const workerLogPath = path.join(projectsDir, entry.name, 'worker.log');
      const workspacePath = await this.extractWorkspacePathFromWorkerLog(workerLogPath);
      if (!workspacePath || seenWorkspacePaths.has(workspacePath)) {
        continue;
      }
      seenWorkspacePaths.add(workspacePath);
      // Chat transcripts live in a directory named by the md5 of the workspace path.
      const workspaceHash = this.md5(workspacePath);
      const chatsDir = path.join(cursorHome, 'chats', workspaceHash);
      const files = await findFilesRecursivelyCreatedAfter(chatsDir, '.jsonl', lastScanAt);
      for (const filePath of files) {
        const parsed = await this.processSessionFile(filePath);
        if (!parsed) {
          continue;
        }
        const timestamps = await readFileTimestamps(filePath);
        sessionsDb.createSession(
          parsed.sessionId,
          this.provider,
          parsed.workspacePath,
          parsed.sessionName,
          timestamps.createdAt,
          timestamps.updatedAt,
          filePath,
        );
        processed += 1;
      }
    }
    return processed;
  }
  /**
   * Produces the same workspace hash Cursor uses in chat directory names.
   */
  private md5(input: string): string {
    return crypto.createHash('md5').update(input).digest('hex');
  }
  /**
   * Extracts workspace path from Cursor worker.log.
   *
   * Streams the log line by line and returns on the first `workspacePath=` hit;
   * reader and stream are closed early to avoid holding file handles.
   * NOTE(review): assumes the log encodes `workspacePath=<path>` at end of line —
   * confirm against the current Cursor log format.
   */
  private async extractWorkspacePathFromWorkerLog(filePath: string): Promise<string | null> {
    try {
      const fileStream = fs.createReadStream(filePath, { encoding: 'utf8' });
      const lineReader = readline.createInterface({ input: fileStream, crlfDelay: Infinity });
      for await (const line of lineReader) {
        const match = line.match(/workspacePath=(.*)$/);
        const workspacePath = match?.[1]?.trim();
        if (workspacePath) {
          lineReader.close();
          fileStream.close();
          return workspacePath;
        }
      }
    } catch {
      // Missing worker logs are valid for partial/incomplete session data.
    }
    return null;
  }
  /**
   * Extracts session metadata from one Cursor JSONL session file.
   *
   * The session id is the transcript's basename; the workspace comes from the
   * worker.log two directories up; the display name is the first line of the
   * first user message.
   */
  private async processSessionFile(filePath: string): Promise<ParsedSession | null> {
    const sessionId = path.basename(filePath, '.jsonl');
    const grandparentDir = path.dirname(path.dirname(filePath));
    const workerLogPath = path.join(grandparentDir, 'worker.log');
    const workspacePath = await this.extractWorkspacePathFromWorkerLog(workerLogPath);
    if (!workspacePath) {
      return null;
    }
    return extractFirstValidJsonlData(filePath, (rawData) => {
      const data = rawData as Record<string, any>;
      if (data.role !== 'user') {
        return null;
      }
      const text = typeof data.message?.content?.[0]?.text === 'string' ? data.message.content[0].text : '';
      // Strip the <user_query> wrapper and keep only the first line for the name.
      const firstLine = text.replace(/<\/?user_query>/g, '').trim().split('\n')[0];
      return {
        sessionId,
        workspacePath,
        sessionName: normalizeSessionName(firstLine, 'Untitled Cursor Session'),
      };
    });
  }
}

View File

@@ -0,0 +1,128 @@
import os from 'node:os';
import path from 'node:path';
import { readFile } from 'node:fs/promises';
import { sessionsDb } from '@/shared/database/repositories/sessions.db.js';
import {
findFilesRecursivelyCreatedAfter,
normalizeSessionName,
readFileTimestamps,
} from '@/modules/llm/session-indexers/session-indexer.utils.js';
import type { ISessionIndexer } from '@/modules/llm/session-indexers/session-indexer.interface.js';
type ParsedSession = {
sessionId: string;
workspacePath: string;
sessionName?: string;
};
/**
* Session indexer for Gemini transcript artifacts.
*/
export class GeminiSessionIndexer implements ISessionIndexer {
readonly provider = 'gemini' as const;
/**
* Scans Gemini session JSON files and upserts discovered sessions into DB.
*/
async synchronize(lastScanAt: Date | null): Promise<number> {
const geminiHome = path.join(os.homedir(), '.gemini');
const legacySessionFiles = await findFilesRecursivelyCreatedAfter(
path.join(geminiHome, 'sessions'),
'.json',
lastScanAt,
);
const tempFiles = await findFilesRecursivelyCreatedAfter(
path.join(geminiHome, 'tmp'),
'.json',
lastScanAt,
);
const files = [...legacySessionFiles, ...tempFiles];
let processed = 0;
for (const filePath of files) {
if (
filePath.startsWith(path.join(geminiHome, 'tmp')) &&
!filePath.includes(`${path.sep}chats${path.sep}`)
) {
continue;
}
const parsed = await this.processSessionFile(filePath);
if (!parsed) {
continue;
}
const timestamps = await readFileTimestamps(filePath);
sessionsDb.createSession(
parsed.sessionId,
this.provider,
parsed.workspacePath,
parsed.sessionName,
timestamps.createdAt,
timestamps.updatedAt,
filePath,
);
processed += 1;
}
return processed;
}
/**
* Extracts session metadata from one Gemini JSON artifact.
*/
private async processSessionFile(filePath: string): Promise<ParsedSession | null> {
try {
const content = await readFile(filePath, 'utf8');
const data = JSON.parse(content) as Record<string, any>;
const sessionId =
typeof data.sessionId === 'string'
? data.sessionId
: typeof data.id === 'string'
? data.id
: undefined;
if (!sessionId) {
return null;
}
let workspacePath = typeof data.projectPath === 'string' ? data.projectPath : '';
if (!workspacePath && filePath.includes(`${path.sep}chats${path.sep}`)) {
const chatsDir = path.dirname(filePath);
const workspaceDir = path.dirname(chatsDir);
const projectRootPath = path.join(workspaceDir, '.project_root');
try {
const rootContent = await readFile(projectRootPath, 'utf8');
workspacePath = rootContent.trim();
} catch {
// Some Gemini artifacts do not ship a .project_root marker.
}
}
if (!workspacePath) {
return null;
}
const messages = Array.isArray(data.messages) ? data.messages : [];
const firstMessage = messages[0] as Record<string, any> | undefined;
let rawName: string | undefined;
if (Array.isArray(firstMessage?.content) && typeof firstMessage.content[0]?.text === 'string') {
rawName = firstMessage.content[0].text;
} else if (typeof firstMessage?.content === 'string') {
rawName = firstMessage.content;
}
return {
sessionId,
workspacePath,
sessionName: normalizeSessionName(rawName, 'New Gemini Chat'),
};
} catch {
return null;
}
}
}

View File

@@ -0,0 +1,15 @@
import type { ISessionIndexer } from '@/modules/llm/session-indexers/session-indexer.interface.js';
import { ClaudeSessionIndexer } from '@/modules/llm/session-indexers/claude.session-indexer.js';
import { CodexSessionIndexer } from '@/modules/llm/session-indexers/codex.session-indexer.js';
import { CursorSessionIndexer } from '@/modules/llm/session-indexers/cursor.session-indexer.js';
import { GeminiSessionIndexer } from '@/modules/llm/session-indexers/gemini.session-indexer.js';
/**
 * Provider-specific session indexers used by the sync orchestrator.
 *
 * One entry per supported provider; the llm sessions service iterates this
 * list when synchronizing on-disk session artifacts into the DB.
 */
export const sessionIndexers: ISessionIndexer[] = [
  new ClaudeSessionIndexer(),
  new CodexSessionIndexer(),
  new CursorSessionIndexer(),
  new GeminiSessionIndexer(),
];

View File

@@ -0,0 +1,13 @@
import type { LLMProvider } from '@/shared/types/app.js';
/**
 * Contract for provider-specific session indexing logic.
 */
export interface ISessionIndexer {
  /** Provider whose on-disk session artifacts this indexer understands. */
  readonly provider: LLMProvider;
  /**
   * Scans provider session artifacts and upserts discovered sessions into DB.
   *
   * @param lastScanAt - Lower bound on artifact creation time; null scans everything.
   * @returns Number of session files processed in this pass.
   */
  synchronize(lastScanAt: Date | null): Promise<number>;
}

View File

@@ -0,0 +1,154 @@
import fs from 'node:fs';
import fsp from 'node:fs/promises';
import path from 'node:path';
import readline from 'node:readline';
/**
 * Collapses whitespace, trims, and caps a raw session name at 120 characters;
 * blank or missing input yields the provided fallback.
 */
export function normalizeSessionName(rawValue: string | undefined, fallback: string): string {
  const collapsed = (rawValue ?? '').replace(/\s+/g, ' ').trim();
  return collapsed ? collapsed.slice(0, 120) : fallback;
}
/**
 * Lists directory entries with file types; a missing or unreadable directory
 * yields an empty array instead of throwing.
 */
export async function listDirectoryEntriesSafe(
  directoryPath: string,
): Promise<import('node:fs').Dirent[]> {
  return fsp.readdir(directoryPath, { withFileTypes: true }).catch(() => []);
}
/**
 * Builds a lookup map from a JSONL index file by extracting a key/value pair per row.
 * The first occurrence of a key wins so we preserve earliest metadata.
 *
 * Fix: malformed rows (e.g. truncated writes) are now skipped individually, so
 * one bad line no longer discards every row that follows it — previously the
 * JSON.parse throw escaped to the outer catch and silently truncated the map.
 *
 * @param filePath - JSONL file; a missing file yields an empty map.
 * @param keyField - Property whose string value becomes the map key.
 * @param valueField - Property whose string value becomes the map value.
 */
export async function buildLookupMap(
  filePath: string,
  keyField: string,
  valueField: string,
): Promise<Map<string, string>> {
  const lookup = new Map<string, string>();
  try {
    const fileStream = fs.createReadStream(filePath);
    const lineReader = readline.createInterface({ input: fileStream, crlfDelay: Infinity });
    for await (const line of lineReader) {
      const trimmed = line.trim();
      if (!trimmed) {
        continue;
      }
      let parsed: Record<string, unknown>;
      try {
        parsed = JSON.parse(trimmed) as Record<string, unknown>;
      } catch {
        // Tolerate a single unparsable row instead of aborting the whole file.
        continue;
      }
      const key = parsed[keyField];
      const value = parsed[valueField];
      if (typeof key === 'string' && typeof value === 'string' && !lookup.has(key)) {
        lookup.set(key, value);
      }
    }
  } catch {
    // Missing index files are normal for users who have not used a provider yet.
  }
  return lookup;
}
/**
 * Recursively collects files matching `extension` under `rootDir`, keeping only
 * files created after `lastScanAt` when a cutoff is given (null keeps all).
 * The accumulator is shared across recursive calls and returned.
 */
export async function findFilesRecursivelyCreatedAfter(
  rootDir: string,
  extension: string,
  lastScanAt: Date | null,
  fileList: string[] = [],
): Promise<string[]> {
  try {
    const entries = await fsp.readdir(rootDir, { withFileTypes: true });
    for (const entry of entries) {
      const entryPath = path.join(rootDir, entry.name);
      if (entry.isDirectory()) {
        await findFilesRecursivelyCreatedAfter(entryPath, extension, lastScanAt, fileList);
      } else if (entry.isFile() && entry.name.endsWith(extension)) {
        if (lastScanAt === null) {
          fileList.push(entryPath);
        } else {
          const stats = await fsp.stat(entryPath);
          if (stats.birthtime > lastScanAt) {
            fileList.push(entryPath);
          }
        }
      }
    }
  } catch {
    // Missing provider directories should not fail the full sync.
  }
  return fileList;
}
/**
 * Reads JSONL rows until the extractor yields a valid session identity.
 *
 * Fix: malformed rows (e.g. truncated partial writes) are now skipped
 * individually so scanning continues with the next row — previously a single
 * unparsable line aborted the whole file via the outer catch and returned null
 * even when later rows were valid.
 *
 * @param filePath - JSONL file to scan; unreadable files yield null.
 * @param extractor - Returns a truthy value to accept a row, or null/undefined to keep scanning.
 * @returns The first accepted value, or null when no row matches.
 */
export async function extractFirstValidJsonlData<T>(
  filePath: string,
  extractor: (parsedJson: unknown) => T | null | undefined,
): Promise<T | null> {
  try {
    const fileStream = fs.createReadStream(filePath);
    const lineReader = readline.createInterface({ input: fileStream, crlfDelay: Infinity });
    for await (const line of lineReader) {
      const trimmed = line.trim();
      if (!trimmed) {
        continue;
      }
      let parsed: unknown;
      try {
        parsed = JSON.parse(trimmed);
      } catch {
        // Skip unparsable rows instead of abandoning the file.
        continue;
      }
      const extracted = extractor(parsed);
      if (extracted) {
        // Close early so we do not hold the file handle after a match.
        lineReader.close();
        fileStream.close();
        return extracted;
      }
    }
  } catch {
    // Unreadable files are treated as "no session found".
  }
  return null;
}
/**
 * Reads filesystem birth/modify times as ISO strings for DB metadata fields;
 * an unreadable path yields an empty object.
 */
export async function readFileTimestamps(
  filePath: string,
): Promise<{ createdAt?: string; updatedAt?: string }> {
  try {
    const { birthtime, mtime } = await fsp.stat(filePath);
    return {
      createdAt: birthtime.toISOString(),
      updatedAt: mtime.toISOString(),
    };
  } catch {
    return {};
  }
}

View File

@@ -0,0 +1,236 @@
import path from 'node:path';
import fsp, { readFile } from 'node:fs/promises';
import { scanStateDb } from '@/shared/database/repositories/scan-state.db.js';
import { sessionsDb } from '@/shared/database/repositories/sessions.db.js';
import type { LLMProvider } from '@/shared/types/app.js';
import { AppError } from '@/shared/utils/app-error.js';
import { sessionIndexers } from '@/modules/llm/session-indexers/index.js';
/** Aggregated result of running every provider indexer in one sync pass. */
type SyncResult = {
  // Number of session files processed, keyed by provider id.
  processedByProvider: Record<LLMProvider, number>;
  // Failure messages collected during the pass.
  failures: string[];
};
/**
 * Parsed transcript payload for one indexed session.
 * NOTE(review): presumably served to session-history consumers — confirm against the route layer.
 */
type SessionHistoryPayload = {
  sessionId: string;
  provider: string;
  workspacePath: string;
  filePath: string;
  // Source artifact format; selects jsonl vs json parsing.
  fileType: 'jsonl' | 'json';
  entries: unknown[];
};
const SESSION_ID_PATTERN = /^[a-zA-Z0-9._-]{1,120}$/;
/**
 * Restricts session IDs before they are used in DB/filesystem operations.
 *
 * Trims surrounding whitespace and rejects any ID outside the allowed
 * character set/length with a 400-level AppError.
 */
function sanitizeSessionId(sessionId: string): string {
  const trimmedId = String(sessionId).trim();
  if (SESSION_ID_PATTERN.test(trimmedId)) {
    return trimmedId;
  }
  throw new AppError('Invalid session ID format.', {
    code: 'INVALID_SESSION_ID',
    statusCode: 400,
  });
}
/**
 * Removes one file if it exists.
 *
 * @returns true when a file was actually unlinked, false when it was absent.
 * @throws Any unlink failure other than ENOENT.
 */
async function removeFileIfExists(filePath: string): Promise<boolean> {
  try {
    await fsp.unlink(filePath);
  } catch (error) {
    if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
      // Already gone — not an error for cleanup purposes.
      return false;
    }
    throw error;
  }
  return true;
}
/**
 * Parses newline-delimited JSON files and preserves malformed lines as raw entries.
 */
const parseJsonl = (content: string): unknown[] =>
  content
    .split(/\r?\n/)
    .map((rawLine) => rawLine.trim())
    .filter((trimmed) => trimmed.length > 0)
    .map((trimmed) => {
      try {
        return JSON.parse(trimmed) as unknown;
      } catch {
        // Keep unparseable rows visible to the caller instead of dropping them.
        return { raw: trimmed, parseError: true };
      }
    });
/**
 * Parses JSON files and normalizes object payloads into a single-element array.
 */
const parseJson = (content: string): unknown[] => {
  let parsed: unknown;
  try {
    parsed = JSON.parse(content);
  } catch {
    // Surface unreadable payloads instead of discarding them.
    return [{ raw: content, parseError: true }];
  }
  if (Array.isArray(parsed)) {
    return parsed;
  }
  return [parsed];
};
/**
 * Orchestrates provider-specific session indexers and DB-path based cleanup.
 */
export const llmSessionsService = {
  /**
   * Lists indexed sessions from the shared DB, optionally scoped to one provider.
   */
  listIndexedSessions(provider?: string) {
    const allSessions = sessionsDb.getAllSessions();
    if (!provider) {
      return allSessions;
    }
    return allSessions.filter((session) => session.provider === provider);
  },
  /**
   * Runs all provider indexers and updates `scan_state.last_scanned_at`.
   *
   * Indexers run concurrently; a rejected indexer contributes a message to
   * `failures` without blocking the others.
   */
  async synchronizeSessions(): Promise<SyncResult> {
    const lastScanAt = scanStateDb.getLastScannedAt();
    const processedByProvider: Record<LLMProvider, number> = {
      claude: 0,
      codex: 0,
      cursor: 0,
      gemini: 0,
    };
    const failures: string[] = [];
    // allSettled (not all) so one failing provider cannot abort the batch.
    const results = await Promise.allSettled(
      sessionIndexers.map(async (indexer) => ({
        provider: indexer.provider,
        processed: await indexer.synchronize(lastScanAt),
      })),
    );
    for (const result of results) {
      if (result.status === 'fulfilled') {
        processedByProvider[result.value.provider] = result.value.processed;
        continue;
      }
      const reason = result.reason instanceof Error ? result.reason.message : String(result.reason);
      failures.push(reason);
    }
    // NOTE(review): the watermark advances even when some indexers failed, so
    // files created during a failed provider's window are skipped by the next
    // incremental sync — confirm this is intended.
    scanStateDb.updateLastScannedAt();
    return {
      processedByProvider,
      failures,
    };
  },
  /**
   * Runs one provider indexer and updates `scan_state.last_scanned_at`.
   *
   * @param options - `fullRescan: true` ignores the stored watermark and
   *   re-indexes every file for the provider.
   * @throws AppError (500) when no indexer is registered for the provider.
   */
  async synchronizeProvider(
    provider: LLMProvider,
    options: { fullRescan?: boolean } = {},
  ): Promise<{ provider: LLMProvider; processed: number }> {
    const indexer = sessionIndexers.find((entry) => entry.provider === provider);
    if (!indexer) {
      throw new AppError(`No session indexer registered for provider "${provider}".`, {
        code: 'SESSION_INDEXER_NOT_FOUND',
        statusCode: 500,
      });
    }
    const lastScanAt = options.fullRescan ? null : scanStateDb.getLastScannedAt();
    const processed = await indexer.synchronize(lastScanAt);
    scanStateDb.updateLastScannedAt();
    return { provider, processed };
  },
  /**
   * Stores a user-provided display name for an indexed session.
   *
   * @throws AppError (404) when the session is not in the DB.
   */
  updateSessionCustomName(sessionId: string, sessionCustomName: string): void {
    const sessionMetadata = sessionsDb.getSessionById(sessionId);
    if (!sessionMetadata) {
      throw new AppError('Session not found.', {
        code: 'SESSION_NOT_FOUND',
        statusCode: 404,
      });
    }
    sessionsDb.updateSessionCustomName(sessionId, sessionCustomName);
  },
  /**
   * Deletes a session artifact using only DB `jsonl_path`, then removes the DB row.
   *
   * Missing files and missing DB rows are reported via the returned flags
   * rather than thrown.
   */
  async deleteSessionArtifacts(rawSessionId: string): Promise<{
    sessionId: string;
    deletedFromDisk: boolean;
    deletedFromDatabase: boolean;
  }> {
    // Validate the ID before any DB/filesystem access.
    const sessionId = sanitizeSessionId(rawSessionId);
    const existingSession = sessionsDb.getSessionById(sessionId);
    const jsonlPath = existingSession?.jsonl_path ?? null;
    const deletedFromDisk = jsonlPath ? await removeFileIfExists(jsonlPath) : false;
    if (existingSession) {
      sessionsDb.deleteSession(sessionId);
    }
    return {
      sessionId,
      deletedFromDisk,
      deletedFromDatabase: Boolean(existingSession),
    };
  },
  /**
   * Reads session history directly from `sessions.jsonl_path` without legacy fetchers.
   *
   * @throws AppError (404) when the session is unknown or has no stored file path.
   */
  async getSessionHistory(sessionId: string): Promise<SessionHistoryPayload> {
    const session = sessionsDb.getSessionById(sessionId);
    if (!session) {
      throw new AppError(`Session "${sessionId}" was not found.`, {
        code: 'SESSION_NOT_FOUND',
        statusCode: 404,
      });
    }
    if (!session.jsonl_path) {
      throw new AppError(`Session "${sessionId}" does not have a history file path.`, {
        code: 'SESSION_HISTORY_NOT_AVAILABLE',
        statusCode: 404,
      });
    }
    const filePath = session.jsonl_path;
    const fileContent = await readFile(filePath, 'utf8');
    const extension = path.extname(filePath).toLowerCase();
    // Gemini histories (and any .json file) are whole JSON documents; other
    // providers write newline-delimited JSON.
    const isGeminiJson = session.provider === 'gemini' || extension === '.json';
    return {
      sessionId: session.session_id,
      provider: session.provider,
      workspacePath: session.workspace_path,
      filePath,
      fileType: isGeminiJson ? 'json' : 'jsonl',
      entries: isGeminiJson ? parseJson(fileContent) : parseJsonl(fileContent),
    };
  },
};

View File

@@ -0,0 +1,131 @@
import chokidar from 'chokidar';
import os from 'node:os';
import path from 'node:path';
import { promises as fsPromises } from 'node:fs';
import { llmSessionsService } from '@/modules/llm/sessions.service.js';
import type { LLMProvider } from '@/shared/types/app.js';
import { logger } from '@/shared/utils/logger.js';
// File system watchers for provider project/session folders
const PROVIDER_WATCH_PATHS: Array<{ provider: LLMProvider; rootPath: string }> = [
  {
    provider: 'claude',
    rootPath: path.join(os.homedir(), '.claude', 'projects'),
  },
  {
    provider: 'cursor',
    rootPath: path.join(os.homedir(), '.cursor', 'chats'),
  },
  {
    provider: 'codex',
    rootPath: path.join(os.homedir(), '.codex', 'sessions'),
  },
  {
    provider: 'gemini',
    rootPath: path.join(os.homedir(), '.gemini', 'sessions'),
  },
  {
    provider: 'gemini',
    rootPath: path.join(os.homedir(), '.gemini', 'tmp'),
  },
];
// Noise we never want to poll: VCS/build output and editor temp files.
const WATCHER_IGNORED_PATTERNS = [
  '**/node_modules/**',
  '**/.git/**',
  '**/dist/**',
  '**/build/**',
  '**/*.tmp',
  '**/*.swp',
  '**/.DS_Store',
];
// Live watcher instances retained in module state. Typed structurally
// (anything async-closable) instead of `any` so misuse is still type-checked.
const watchers: Array<{ close: () => Promise<void> }> = [];
type EventType = 'add' | 'change';
/**
 * Handles watcher update events and triggers provider index synchronization.
 */
async function onUpdate(
  eventType: EventType,
  filePath: string,
  provider: LLMProvider,
): Promise<void> {
  try {
    const syncResult = await llmSessionsService.synchronizeProvider(provider, { fullRescan: true });
    logger.info(`LLM watcher sync complete for provider "${provider}" after ${eventType}`, {
      filePath,
      processed: syncResult.processed,
    });
  } catch (error) {
    logger.error(`LLM watcher sync failed for provider "${provider}"`, {
      eventType,
      filePath,
      error: error instanceof Error ? error.message : String(error),
    });
  }
}
/**
 * Initializes LLM session watchers and performs an initial index sync.
 *
 * For each provider root the directory is created if missing (chokidar v4
 * cannot recover from a missing root), then a polling watcher is attached
 * whose add/change events trigger a full provider rescan via onUpdate.
 *
 * NOTE(review): each file event triggers a full rescan with no debouncing,
 * so a burst of writes can start overlapping rescans — confirm this is
 * acceptable at current session volumes.
 */
export async function initializeWatcher(): Promise<void> {
  logger.info('Setting up LLM session watchers...');
  const initialSync = await llmSessionsService.synchronizeSessions();
  logger.info('Initial LLM session sync complete.', {
    processedByProvider: initialSync.processedByProvider,
    failures: initialSync.failures,
  });
  for (const { provider, rootPath } of PROVIDER_WATCH_PATHS) {
    try {
      // chokidar v4 emits ENOENT via the "error" event for missing roots and will not auto-recover.
      // Ensure provider folders exist before creating the watcher so watching stays active.
      await fsPromises.mkdir(rootPath, { recursive: true });
      const watcher = chokidar.watch(rootPath, {
        ignored: WATCHER_IGNORED_PATTERNS,
        persistent: true,
        // Don't fire events for existing files on startup
        ignoreInitial: true,
        followSymlinks: false,
        // Reasonable depth limit
        depth: 6,
        // Use polling to fix Windows fs.watch buffering/batching issues.
        // It now stops relying on native filesystem events and checks for changes at intervals.
        usePolling: true,
        // Poll every 2000ms
        interval: 2_000,
        // Large binary files are more expensive to poll than text files.
        binaryInterval: 6_000,
        // Removed awaitWriteFinish to prevent delays when LLM streams to the file
      });
      watcher
        .on('add', (filePath: string) => {
          void onUpdate('add', filePath, provider);
        })
        .on('change', (filePath: string) => {
          void onUpdate('change', filePath, provider);
        })
        .on('error', (error: unknown) => {
          const message = error instanceof Error ? error.message : String(error);
          logger.error(`LLM watcher error for provider "${provider}"`, {
            error: message,
          });
        });
      // Retained in module state; presumably closed on shutdown elsewhere — TODO confirm.
      watchers.push(watcher);
    } catch (error) {
      const message = error instanceof Error ? error.message : String(error);
      // A failed watcher is logged but does not prevent other providers from being watched.
      logger.error(`Failed to initialize LLM watcher for provider "${provider}"`, {
        rootPath,
        error: message,
      });
    }
  }
}

View File

@@ -11,7 +11,7 @@ import {
} from '../../../projects.js';
import { sessionsDb } from '@/shared/database/repositories/sessions.db.js';
import { workspaceOriginalPathsDb } from '@/shared/database/repositories/workspace-original-paths.db.js';
import { deleteSession as deleteSessionFromProviders } from '@/modules/sessions/sessions.service.js';
import { llmSessionsService } from '@/modules/llm/sessions.service.js';
import { authenticateToken } from '../auth/auth.middleware.js';
import { getWorkspaceNameFromPath, WORKSPACES_ROOT, validateWorkspacePath } from './projects.utils.js';
@@ -69,7 +69,7 @@ router.delete('/api/projects/:projectName/sessions/:sessionId', authenticateToke
try {
const { projectName, sessionId } = req.params;
console.log(`[API] Deleting session: ${sessionId} from project: ${projectName}`);
await deleteSessionFromProviders(sessionId);
await llmSessionsService.deleteSessionArtifacts(sessionId);
console.log(`[API] Session ${sessionId} deleted successfully`);
res.json({ success: true });
} catch (error) {

View File

@@ -1,116 +0,0 @@
import os from 'os';
import path from 'path';
import fsp from 'node:fs/promises';
import { sessionsDb } from '@/shared/database/repositories/sessions.db.js';
import { buildLookupMap, extractFirstValidJsonlData, findFilesRecursivelyCreatedAfterLastScan } from '@/modules/providers/shared/session-parser.utils.js';
import { SessionData } from '@/shared/types/session.js';
/**
 * Extracts session identity (workspacePath, sessionId, display name) from a
 * single Claude session .jsonl file.
 *
 * @param file - Path to the session .jsonl file.
 * @param nameMap - Optional sessionId -> display-name index; loaded from
 *   ~/.claude/history.jsonl when not supplied.
 * @returns Data from the first row that yields both a sessionId and a
 *   workspacePath, or null when none does.
 */
export async function processClaudeSessionFile(file: string, nameMap?: Map<string, string>): Promise<SessionData | null> {
  if (!nameMap) {
    const base = path.join(os.homedir(), '.claude');
    nameMap = await buildLookupMap(path.join(base, 'history.jsonl'), 'sessionId', 'display');
  }
  // Claude puts cwd and sessionId directly on the root object
  return extractFirstValidJsonlData(file, (data) => ({
    workspacePath: data?.cwd,
    sessionId: data?.sessionId,
    sessionName: nameMap!.get(data?.sessionId) || 'Untitled Claude Session'
  }));
}
/**
 * Indexes new Claude session files into the sessions DB.
 *
 * Scans ~/.claude/projects for .jsonl files created since the last scan,
 * extracts session identity from each, and records it via
 * sessionsDb.createSession with filesystem timestamps when stat succeeds.
 */
export async function processClaudeSessions() {
  const base = path.join(os.homedir(), '.claude');
  // Pre-load names from history index
  const nameMap = await buildLookupMap(path.join(base, 'history.jsonl'), 'sessionId', 'display');
  const files = await findFilesRecursivelyCreatedAfterLastScan(path.join(base, 'projects'), '.jsonl');
  for (const file of files) {
    const result = await processClaudeSessionFile(file, nameMap);
    if (result) {
      let createdAt: string | undefined;
      let updatedAt: string | undefined;
      try {
        const stat = await fsp.stat(file);
        createdAt = stat.birthtime.toISOString();
        updatedAt = stat.mtime.toISOString();
      } catch {
        // Ignore stat failures and let DB defaults handle created_at/updated_at.
      }
      sessionsDb.createSession(
        result.sessionId,
        'claude',
        result.workspacePath,
        result.sessionName,
        createdAt,
        updatedAt,
        file,
      );
    }
  }
}
/**
 * Mirrors Claude's on-disk project folder naming: every character outside
 * [a-zA-Z0-9-] becomes a dash.
 */
function encodeClaudeProjectPath(projectPath: string): string {
  const disallowedCharacters = /[^a-zA-Z0-9-]/g;
  return projectPath.replace(disallowedCharacters, '-');
}
/**
 * Deletes a file, treating "already gone" as a non-error.
 *
 * @returns true when a file was actually unlinked, false when it was absent.
 * @throws Any unlink failure other than ENOENT (e.g. permission errors).
 */
async function removeFileIfExists(filePath: string): Promise<boolean> {
  try {
    await fsp.unlink(filePath);
    return true;
  } catch (error) {
    // Narrow the unknown catch value instead of typing it `any`.
    if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
      return false;
    }
    throw error;
  }
}
/**
 * Lists directory entries, treating unreadable or missing paths as empty.
 */
async function listDirectoryEntriesSafe(directoryPath: string): Promise<import('node:fs').Dirent[]> {
  let entries: import('node:fs').Dirent[] = [];
  try {
    entries = await fsp.readdir(directoryPath, { withFileTypes: true });
  } catch {
    // Missing or permission-restricted directories behave like empty ones.
  }
  return entries;
}
/**
 * Walks the tree under rootPath and collects every file whose basename
 * equals fileName exactly.
 */
async function findFilesByName(rootPath: string, fileName: string): Promise<string[]> {
  const found: string[] = [];
  const pending: string[] = [rootPath];
  while (pending.length > 0) {
    const directory = pending.pop()!;
    for (const entry of await listDirectoryEntriesSafe(directory)) {
      const entryPath = path.join(directory, entry.name);
      if (entry.isDirectory()) {
        pending.push(entryPath);
        continue;
      }
      if (entry.isFile() && entry.name === fileName) {
        found.push(entryPath);
      }
    }
  }
  return found;
}
/**
 * Deletes a Claude session's .jsonl file from ~/.claude/projects.
 *
 * Tries the encoded project folder for workspacePath first (when given),
 * then sweeps the whole projects tree for any remaining <sessionId>.jsonl.
 *
 * @returns true when at least one file was removed.
 */
export async function deleteClaudeSession(sessionId: string, workspacePath?: string): Promise<boolean> {
  const claudeProjectsDir = path.join(os.homedir(), '.claude', 'projects');
  const fileName = `${sessionId}.jsonl`;
  let deleted = false;
  if (workspacePath) {
    // Fast path: the file usually lives under the encoded workspace folder.
    const encodedPath = encodeClaudeProjectPath(workspacePath);
    const candidateFilePath = path.join(claudeProjectsDir, encodedPath, fileName);
    deleted = (await removeFileIfExists(candidateFilePath)) || deleted;
  }
  // Full sweep catches copies in other project folders.
  const matches = await findFilesByName(claudeProjectsDir, fileName);
  for (const filePath of matches) {
    deleted = (await removeFileIfExists(filePath)) || deleted;
  }
  return deleted;
}

View File

@@ -1 +0,0 @@

View File

@@ -1,178 +0,0 @@
import os from 'os';
import path from 'path';
import fsp from 'node:fs/promises';
import { sessionsDb } from '@/shared/database/repositories/sessions.db.js';
import { buildLookupMap, extractFirstValidJsonlData, findFilesRecursivelyCreatedAfterLastScan } from '@/modules/providers/shared/session-parser.utils.js';
import { SessionData } from '@/shared/types/session.js';
/**
 * Extracts session identity from a single Codex session .jsonl file.
 *
 * @param file - Path to the session .jsonl file.
 * @param nameMap - Optional id -> thread_name index; loaded from
 *   ~/.codex/session_index.jsonl when not supplied.
 * @returns Data from the first row whose payload yields both an id and a
 *   cwd, or null when none does.
 */
export async function processCodexSessionFile(file: string, nameMap?: Map<string, string>): Promise<SessionData | null> {
  if (!nameMap) {
    const base = path.join(os.homedir(), '.codex');
    nameMap = await buildLookupMap(path.join(base, 'session_index.jsonl'), 'id', 'thread_name');
  }
  // Codex nests the required data inside a `payload` object
  return extractFirstValidJsonlData(file, (data) => ({
    workspacePath: data?.payload?.cwd,
    sessionId: data?.payload?.id,
    sessionName: nameMap!.get(data?.payload?.id) || 'Untitled Codex Session'
  }));
}
/**
 * Indexes new Codex session files into the sessions DB.
 *
 * Scans ~/.codex/sessions for .jsonl files created since the last scan and
 * records each recognized session via sessionsDb.createSession, attaching
 * filesystem timestamps when stat succeeds.
 */
export async function processCodexSessions() {
  const base = path.join(os.homedir(), '.codex');
  // Use the thread_name attribute as requested
  const nameMap = await buildLookupMap(path.join(base, 'session_index.jsonl'), 'id', 'thread_name');
  const files = await findFilesRecursivelyCreatedAfterLastScan(path.join(base, 'sessions'), '.jsonl');
  for (const file of files) {
    const result = await processCodexSessionFile(file, nameMap);
    if (result) {
      let createdAt: string | undefined;
      let updatedAt: string | undefined;
      try {
        const stat = await fsp.stat(file);
        createdAt = stat.birthtime.toISOString();
        updatedAt = stat.mtime.toISOString();
      } catch {
        // Ignore stat failures and let DB defaults handle created_at/updated_at.
      }
      sessionsDb.createSession(
        result.sessionId,
        'codex',
        result.workspacePath,
        result.sessionName,
        createdAt,
        updatedAt,
        file,
      );
    }
  }
}
/**
 * Returns the candidate path segments for a month/day number: the plain
 * value, plus a zero-padded form when the two differ (e.g. 5 -> ['5','05']).
 */
function getPathNumberVariants(value: number): string[] {
  const plain = String(value);
  const padded = plain.padStart(2, '0');
  return plain === padded ? [plain] : [plain, padded];
}
/**
 * Expands a createdAt timestamp into every plausible year/month/day folder
 * triple Codex may have used: the local and UTC calendar dates, each with
 * both plain and zero-padded month/day segments, deduplicated.
 *
 * @returns [] when createdAt does not parse as a date.
 */
function buildCodexDatePathParts(createdAt: string): Array<{ year: string; month: string; day: string }> {
  const timestamp = new Date(createdAt);
  if (Number.isNaN(timestamp.getTime())) {
    return [];
  }
  const localParts = {
    year: String(timestamp.getFullYear()),
    month: timestamp.getMonth() + 1,
    day: timestamp.getDate(),
  };
  const utcParts = {
    year: String(timestamp.getUTCFullYear()),
    month: timestamp.getUTCMonth() + 1,
    day: timestamp.getUTCDate(),
  };
  const sameCalendarDay =
    localParts.year === utcParts.year &&
    localParts.month === utcParts.month &&
    localParts.day === utcParts.day;
  const candidates = sameCalendarDay ? [localParts] : [localParts, utcParts];
  // Deduplicate across padded/unpadded and local/UTC combinations.
  const uniqueParts = new Map<string, { year: string; month: string; day: string }>();
  for (const candidate of candidates) {
    for (const month of getPathNumberVariants(candidate.month)) {
      for (const day of getPathNumberVariants(candidate.day)) {
        uniqueParts.set(`${candidate.year}-${month}-${day}`, { year: candidate.year, month, day });
      }
    }
  }
  return [...uniqueParts.values()];
}
/**
 * Deletes a file, treating "already gone" as a non-error.
 *
 * @returns true when a file was actually unlinked, false when it was absent.
 * @throws Any unlink failure other than ENOENT (e.g. permission errors).
 */
async function removeFileIfExists(filePath: string): Promise<boolean> {
  try {
    await fsp.unlink(filePath);
    return true;
  } catch (error) {
    // Narrow the unknown catch value instead of typing it `any`.
    if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
      return false;
    }
    throw error;
  }
}
/**
 * Lists directory entries, treating unreadable or missing paths as empty.
 */
async function listDirectoryEntriesSafe(directoryPath: string): Promise<import('node:fs').Dirent[]> {
  let entries: import('node:fs').Dirent[] = [];
  try {
    entries = await fsp.readdir(directoryPath, { withFileTypes: true });
  } catch {
    // Missing or permission-restricted directories behave like empty ones.
  }
  return entries;
}
/**
 * Walks the tree under rootPath and collects every file whose basename
 * equals fileName exactly.
 */
async function findFilesByName(rootPath: string, fileName: string): Promise<string[]> {
  const found: string[] = [];
  const pending: string[] = [rootPath];
  while (pending.length > 0) {
    const directory = pending.pop()!;
    const entries = await listDirectoryEntriesSafe(directory);
    for (const entry of entries) {
      const entryPath = path.join(directory, entry.name);
      if (entry.isDirectory()) {
        pending.push(entryPath);
      } else if (entry.isFile() && entry.name === fileName) {
        found.push(entryPath);
      }
    }
  }
  return found;
}
/**
 * Deletes a Codex session's .jsonl from ~/.codex/sessions.
 *
 * Codex stores sessions under year/month/day folders; when createdAt is
 * given, every plausible date-path variant (local/UTC, padded/unpadded) is
 * tried first, and only if nothing matched is the whole tree swept.
 *
 * @returns true when at least one file was removed.
 */
export async function deleteCodexSession(sessionId: string, createdAt?: string): Promise<boolean> {
  const codexSessionsDir = path.join(os.homedir(), '.codex', 'sessions');
  const fileName = `${sessionId}.jsonl`;
  let deleted = false;
  if (createdAt) {
    // Fast path: derive candidate date folders from the creation timestamp.
    const datePathParts = buildCodexDatePathParts(createdAt);
    for (const parts of datePathParts) {
      const candidateFilePath = path.join(
        codexSessionsDir,
        parts.year,
        parts.month,
        parts.day,
        fileName,
      );
      deleted = (await removeFileIfExists(candidateFilePath)) || deleted;
    }
  }
  if (!deleted) {
    // Fallback: full tree sweep when no date-derived path matched.
    const matches = await findFilesByName(codexSessionsDir, fileName);
    for (const filePath of matches) {
      deleted = (await removeFileIfExists(filePath)) || deleted;
    }
  }
  return deleted;
}

View File

@@ -1,208 +0,0 @@
import os from 'os';
import path from 'path';
import fs from 'node:fs';
import fsp from 'node:fs/promises';
import readline from 'readline';
import crypto from 'node:crypto';
import { sessionsDb } from '@/shared/database/repositories/sessions.db.js';
import { extractFirstValidJsonlData, findFilesRecursivelyCreatedAfterLastScan } from '@/modules/providers/shared/session-parser.utils.js';
import { SessionData } from '@/shared/types/session.js';
/**
 * Hex MD5 digest; Cursor derives its chats/<hash> folder name from the
 * workspace path this way.
 */
function md5(input: string): string {
  const hasher = crypto.createHash('md5');
  hasher.update(input);
  return hasher.digest('hex');
}
/**
 * Deletes a file, treating "already gone" as a non-error.
 *
 * @returns true when a file was actually unlinked, false when it was absent.
 * @throws Any unlink failure other than ENOENT (e.g. permission errors).
 */
async function removeFileIfExists(filePath: string): Promise<boolean> {
  try {
    await fsp.unlink(filePath);
    return true;
  } catch (error) {
    // Narrow the unknown catch value instead of typing it `any`.
    if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
      return false;
    }
    throw error;
  }
}
/**
 * Recursively deletes a directory, treating "already gone" as a non-error.
 *
 * `force: false` keeps ENOENT observable so we can report whether anything
 * was actually removed.
 *
 * @returns true when the directory was removed, false when it did not exist.
 * @throws Any rm failure other than ENOENT.
 */
async function removeDirectoryIfExists(directoryPath: string): Promise<boolean> {
  try {
    await fsp.rm(directoryPath, { recursive: true, force: false });
    return true;
  } catch (error) {
    // Narrow the unknown catch value instead of typing it `any`.
    if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
      return false;
    }
    throw error;
  }
}
/**
 * Lists directory entries, treating unreadable or missing paths as empty.
 */
async function listDirectoryEntriesSafe(directoryPath: string): Promise<import('node:fs').Dirent[]> {
  let entries: import('node:fs').Dirent[] = [];
  try {
    entries = await fsp.readdir(directoryPath, { withFileTypes: true });
  } catch {
    // Missing or permission-restricted directories behave like empty ones.
  }
  return entries;
}
/**
 * Walks the tree under rootPath and collects every directory whose name
 * equals directoryName exactly. Matching directories are still descended
 * into, so nested matches are found as well.
 */
async function findDirectoriesByName(rootPath: string, directoryName: string): Promise<string[]> {
  const found: string[] = [];
  const pending: string[] = [rootPath];
  while (pending.length > 0) {
    const directory = pending.pop()!;
    for (const entry of await listDirectoryEntriesSafe(directory)) {
      if (!entry.isDirectory()) {
        continue;
      }
      const entryPath = path.join(directory, entry.name);
      if (entry.name === directoryName) {
        found.push(entryPath);
      }
      pending.push(entryPath);
    }
  }
  return found;
}
/**
 * Walks the tree under rootPath and collects every file whose basename
 * equals fileName exactly.
 */
async function findFilesByName(rootPath: string, fileName: string): Promise<string[]> {
  const found: string[] = [];
  const pending: string[] = [rootPath];
  while (pending.length > 0) {
    const directory = pending.pop()!;
    for (const entry of await listDirectoryEntriesSafe(directory)) {
      const entryPath = path.join(directory, entry.name);
      if (entry.isDirectory()) {
        pending.push(entryPath);
        continue;
      }
      if (entry.isFile() && entry.name === fileName) {
        found.push(entryPath);
      }
    }
  }
  return found;
}
/**
 * Scans a Cursor worker.log line-by-line for the first `workspacePath=...`
 * marker and returns everything after the `=` on that line.
 *
 * @returns null when the file is missing/unreadable or no marker is found.
 */
export async function extractWorkspacePathFromWorkerLog(filePath: string): Promise<string | null> {
  const markerPattern = /workspacePath=(.*)$/;
  try {
    const stream = fs.createReadStream(filePath, { encoding: 'utf8' });
    const lines = readline.createInterface({
      input: stream,
      crlfDelay: Infinity
    });
    for await (const currentLine of lines) {
      const workspacePath = markerPattern.exec(currentLine)?.[1];
      if (workspacePath) {
        // Stop reading as soon as the first marker is found.
        lines.close();
        stream.close();
        return workspacePath;
      }
    }
  } catch {
    // Unreadable logs simply mean no workspace path is known.
  }
  return null;
}
/**
 * Builds session data for a Cursor chat .jsonl file.
 *
 * The session ID is the file's basename; the workspace path comes from the
 * worker.log two directory levels up; the session name is the first line of
 * the first user message with any <user_query> tags stripped.
 *
 * @returns null when no workspace path or user message can be found.
 */
export async function processCursorSessionFile(file: string): Promise<SessionData | null> {
  const sessionId = path.basename(file, '.jsonl');
  const grandparentDir = path.dirname(path.dirname(file));
  const workerLogPath = path.join(grandparentDir, 'worker.log');
  const workspacePath = await extractWorkspacePathFromWorkerLog(workerLogPath);
  if (!workspacePath) return null;
  return extractFirstValidJsonlData(file, (lineJson) => {
    if (lineJson.role === 'user') {
      const rawText = lineJson.message?.content?.[0]?.text || '';
      // Strip <user_query> tags and trim
      const cleanName = rawText.replace(/<\/?user_query>/g, '').trim().split('\n');
      return { sessionId: sessionId as string, workspacePath, sessionName: cleanName[0] || "Untitled Cursor Session" };
    }
    return null;
  });
}
/**
 * Indexes new Cursor chat sessions into the sessions DB.
 *
 * Each folder under ~/.cursor/projects carries a worker.log that names its
 * workspace; the matching chats live under ~/.cursor/chats/<md5(workspace)>.
 * Workspaces are deduplicated, and any top-level failure (e.g. a missing
 * ~/.cursor) is swallowed so indexing stays best-effort.
 */
export async function processCursorSessions() {
  try {
    const cursorBase = path.join(os.homedir(), '.cursor');
    const projectsDir = path.join(cursorBase, 'projects');
    const projectDirs = await fsp.readdir(projectsDir);
    const seenWorkspacePaths = new Set<string>();
    for (const projectDir of projectDirs) {
      const workerLogPath = path.join(projectsDir, projectDir, 'worker.log');
      const workspacePath = await extractWorkspacePathFromWorkerLog(workerLogPath);
      // Skip projects with no resolvable workspace and duplicate workspaces.
      if (!workspacePath || seenWorkspacePaths.has(workspacePath)) continue;
      seenWorkspacePaths.add(workspacePath);
      // Cursor keys its chats folder by the md5 of the workspace path.
      const workspaceHash = md5(workspacePath);
      const chatsDir = path.join(cursorBase, 'chats', workspaceHash);
      const sessionFiles = await findFilesRecursivelyCreatedAfterLastScan(chatsDir, '.jsonl');
      for (const file of sessionFiles) {
        const result = await processCursorSessionFile(file);
        if (result) {
          let createdAt: string | undefined;
          let updatedAt: string | undefined;
          try {
            const stat = await fsp.stat(file);
            createdAt = stat.birthtime.toISOString();
            updatedAt = stat.mtime.toISOString();
          } catch {
            // Ignore stat failures and let DB defaults handle created_at/updated_at.
          }
          sessionsDb.createSession(
            result.sessionId,
            'cursor',
            result.workspacePath,
            result.sessionName,
            createdAt,
            updatedAt,
            file,
          );
        }
      }
    }
  } catch (e) {
    // Base cursor directory or projects directory likely doesn't exist
  }
}
/**
 * Deletes a Cursor session's data under ~/.cursor/chats.
 *
 * Tries the md5(workspacePath)/sessionId folder first when a workspace is
 * known, then sweeps the whole chats tree for directories named sessionId
 * and for stray <sessionId>.jsonl files.
 *
 * @returns true when at least one file or directory was removed.
 */
export async function deleteCursorSession(sessionId: string, workspacePath?: string): Promise<boolean> {
  const cursorChatsDir = path.join(os.homedir(), '.cursor', 'chats');
  let deleted = false;
  if (workspacePath) {
    // Fast path: the session folder usually lives under the hashed workspace.
    const cwdId = md5(workspacePath);
    const candidateDir = path.join(cursorChatsDir, cwdId, sessionId);
    deleted = (await removeDirectoryIfExists(candidateDir)) || deleted;
  }
  const sessionDirs = await findDirectoriesByName(cursorChatsDir, sessionId);
  for (const directoryPath of sessionDirs) {
    deleted = (await removeDirectoryIfExists(directoryPath)) || deleted;
  }
  const jsonlFiles = await findFilesByName(cursorChatsDir, `${sessionId}.jsonl`);
  for (const filePath of jsonlFiles) {
    deleted = (await removeFileIfExists(filePath)) || deleted;
  }
  return deleted;
}

View File

@@ -1,172 +0,0 @@
import os from 'os';
import path from 'path';
import fsp from 'node:fs/promises';
import { sessionsDb } from '@/shared/database/repositories/sessions.db.js';
import { findFilesRecursivelyCreatedAfterLastScan } from '@/modules/providers/shared/session-parser.utils.js';
import { SessionData } from '@/shared/types/session.js';
/**
 * Extracts session identity from a Gemini session JSON file.
 *
 * Supports two layouts: the new format keyed by `sessionId` (workspace path
 * read from a sibling `.project_root` file when the session sits under a
 * chats/ folder) and the old format keyed by `id` + `projectPath`. The
 * session name is derived from the first message's content, flattened to
 * one line and capped at 100 characters.
 *
 * @returns null for unreadable or unrecognized files.
 */
export async function processGeminiSessionFile(file: string): Promise<SessionData | null> {
  try {
    const fileContent = await fsp.readFile(file, 'utf8');
    const data = JSON.parse(fileContent);
    // Check for new format: data.sessionId
    // Fallback for old format: data.id and data.projectPath
    if (data?.sessionId || (data?.id && data?.projectPath)) {
      let sessionId = data.sessionId || data.id;
      let workspacePath = data.projectPath || '';
      let sessionName = 'New Gemini Chat';
      // Extract workspacePath for new format
      if (data?.sessionId && file.includes(`${path.sep}chats${path.sep}`)) {
        const chatsDir = path.dirname(file);
        const workspaceDir = path.dirname(chatsDir);
        const projectRootFile = path.join(workspaceDir, '.project_root');
        try {
          const rootContent = await fsp.readFile(projectRootFile, 'utf8');
          if (rootContent) {
            workspacePath = rootContent.trim();
          }
        } catch (e) {
          // Ignore if .project_root doesn't exist
        }
      }
      // Extract sessionName from the first message.
      // Content may be an array of parts (new format) or a plain string.
      if (data.messages && Array.isArray(data.messages) && data.messages.length > 0) {
        const firstMessage = data.messages[0];
        if (firstMessage?.content && Array.isArray(firstMessage.content) && firstMessage.content.length > 0) {
          sessionName = firstMessage.content[0]?.text?.trim() || sessionName;
        } else if (firstMessage?.content && typeof firstMessage.content === 'string') {
          sessionName = firstMessage.content.trim() || sessionName;
        }
      } else if (data.messages?.[0]?.content) {
        // old format fallback
        sessionName = data.messages[0].content;
      }
      // Clean up sessionName: one line, max 100 chars.
      if (sessionName) {
        sessionName = sessionName.replace(/\n/g, ' ').trim().substring(0, 100);
      }
      return {
        sessionId,
        workspacePath,
        sessionName
      };
    }
  } catch (e) {
    // Ignore parsing error for gemini
  }
  return null;
}
/**
 * Indexes new Gemini session files into the sessions DB.
 *
 * Covers both the legacy ~/.gemini/sessions layout and the newer
 * ~/.gemini/tmp/<project>/chats layout (tmp files outside a chats/ folder
 * are skipped), recording each recognized session with filesystem
 * timestamps when stat succeeds.
 */
export async function processGeminiSessions() {
  const geminiHome = path.join(os.homedir(), '.gemini');
  // Process old sessions directory
  const oldGeminiPath = path.join(geminiHome, 'sessions');
  const oldFiles = await findFilesRecursivelyCreatedAfterLastScan(oldGeminiPath, '.json');
  // Process new tmp/chats directories
  const tmpGeminiPath = path.join(geminiHome, 'tmp');
  const tmpFiles = await findFilesRecursivelyCreatedAfterLastScan(tmpGeminiPath, '.json');
  const files = [...oldFiles, ...tmpFiles];
  for (const file of files) {
    // For tmp files, only process those inside a 'chats' directory
    if (file.startsWith(tmpGeminiPath) && !file.includes(`${path.sep}chats${path.sep}`)) {
      continue;
    }
    const result = await processGeminiSessionFile(file);
    if (result) {
      let createdAt: string | undefined;
      let updatedAt: string | undefined;
      try {
        const stat = await fsp.stat(file);
        createdAt = stat.birthtime.toISOString();
        updatedAt = stat.mtime.toISOString();
      } catch {
        // Ignore stat failures and let DB defaults handle created_at/updated_at.
      }
      sessionsDb.createSession(
        result.sessionId,
        'gemini',
        result.workspacePath,
        result.sessionName,
        createdAt,
        updatedAt,
        file,
      );
    }
  }
}
/**
 * Deletes a file, treating "already gone" as a non-error.
 *
 * @returns true when a file was actually unlinked, false when it was absent.
 * @throws Any unlink failure other than ENOENT (e.g. permission errors).
 */
async function removeFileIfExists(filePath: string): Promise<boolean> {
  try {
    await fsp.unlink(filePath);
    return true;
  } catch (error) {
    // Narrow the unknown catch value instead of typing it `any`.
    if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
      return false;
    }
    throw error;
  }
}
/**
 * Lists directory entries, treating unreadable or missing paths as empty.
 */
async function listDirectoryEntriesSafe(directoryPath: string): Promise<import('node:fs').Dirent[]> {
  let entries: import('node:fs').Dirent[] = [];
  try {
    entries = await fsp.readdir(directoryPath, { withFileTypes: true });
  } catch {
    // Missing or permission-restricted directories behave like empty ones.
  }
  return entries;
}
/**
 * Deletes a Gemini session's files under ~/.gemini.
 *
 * Removes <sessionId>.json/.jsonl from the legacy sessions folder, then
 * scans every tmp/<project>/chats folder: files named <sessionId>.json are
 * removed directly, and other .json files are parsed so sessions whose
 * internal sessionId/id matches are removed too.
 *
 * @returns true when at least one file was removed.
 */
export async function deleteGeminiSession(sessionId: string): Promise<boolean> {
  const geminiHome = path.join(os.homedir(), '.gemini');
  const geminiSessionsDir = path.join(geminiHome, 'sessions');
  const geminiTmpDir = path.join(geminiHome, 'tmp');
  let deleted = false;
  // Legacy layout: flat files keyed by session ID.
  deleted = (await removeFileIfExists(path.join(geminiSessionsDir, `${sessionId}.json`))) || deleted;
  deleted = (await removeFileIfExists(path.join(geminiSessionsDir, `${sessionId}.jsonl`))) || deleted;
  const projectDirs = await listDirectoryEntriesSafe(geminiTmpDir);
  for (const projectDir of projectDirs) {
    if (!projectDir.isDirectory()) {
      continue;
    }
    const chatsDir = path.join(geminiTmpDir, projectDir.name, 'chats');
    const chatFiles = await listDirectoryEntriesSafe(chatsDir);
    for (const chatFile of chatFiles) {
      if (!chatFile.isFile() || !chatFile.name.endsWith('.json')) {
        continue;
      }
      const chatFilePath = path.join(chatsDir, chatFile.name);
      if (chatFile.name === `${sessionId}.json`) {
        deleted = (await removeFileIfExists(chatFilePath)) || deleted;
        continue;
      }
      // File names don't always match the session ID; fall back to reading
      // the payload and comparing its sessionId/id field.
      try {
        const content = await fsp.readFile(chatFilePath, 'utf8');
        const parsed = JSON.parse(content);
        const parsedId = parsed?.sessionId || parsed?.id;
        if (parsedId === sessionId) {
          deleted = (await removeFileIfExists(chatFilePath)) || deleted;
        }
      } catch {
        // Ignore unreadable/malformed session files.
      }
    }
  }
  return deleted;
}

View File

@@ -1,96 +0,0 @@
import fs from 'node:fs';
import fsp from 'node:fs/promises';
import readline from 'readline';
import path from 'path';
import { scanStateDb } from '@/shared/database/repositories/scan-state.db.js';
import { SessionData } from '@/shared/types/session.js';
/**
 * Reads a JSONL file and builds a Map of Key -> Value.
 * Useful for index files like history.jsonl or session_index.jsonl.
 *
 * Only the first occurrence of each key is kept; in history files this is
 * usually the start of the thread. A missing file yields an empty map, and
 * a malformed line is skipped instead of aborting the rest of the index.
 */
export async function buildLookupMap(filePath: string, keyField: string, valueField: string): Promise<Map<string, string>> {
  const lookup = new Map<string, string>();
  try {
    const fileStream = fs.createReadStream(filePath);
    const rl = readline.createInterface({ input: fileStream, crlfDelay: Infinity });
    for await (const line of rl) {
      if (!line.trim()) continue;
      let data: any;
      try {
        data = JSON.parse(line);
      } catch {
        // Skip only the corrupt row; later index entries are still usable.
        continue;
      }
      // We use the first occurrence. In history files, this is usually the start of the thread.
      if (data[keyField] && data[valueField] && !lookup.has(data[keyField])) {
        lookup.set(data[keyField], data[valueField]);
      }
    }
  } catch (e) { /* File might not exist yet */ }
  return lookup;
}
/**
 * Recursively walks a directory tree and returns a flat array of all files
 * matching a specific extension (e.g., '.jsonl' or '.json').
 *
 * When a last-scan timestamp is recorded, only files CREATED (birthtime)
 * after that timestamp are returned; otherwise every matching file is.
 * Missing/unreadable directories are skipped silently.
 */
export async function findFilesRecursivelyCreatedAfterLastScan(
  dirPath: string,
  extension: string,
  fileList: string[] = []
): Promise<string[]> {
  // Read the scan cutoff ONCE per call; the previous version re-queried the
  // DB for every matching file and at every recursion level.
  const lastScanDate = scanStateDb.getLastScannedAt();
  return collectFilesCreatedAfter(dirPath, extension, lastScanDate, fileList);
}

/** Recursive worker so the scan cutoff is fetched only once per top-level call. */
async function collectFilesCreatedAfter(
  dirPath: string,
  extension: string,
  lastScanDate: ReturnType<typeof scanStateDb.getLastScannedAt>,
  fileList: string[]
): Promise<string[]> {
  try {
    const entries = await fsp.readdir(dirPath, { withFileTypes: true });
    for (const entry of entries) {
      const fullPath = path.join(dirPath, entry.name);
      if (entry.isDirectory()) {
        await collectFilesCreatedAfter(fullPath, extension, lastScanDate, fileList);
      } else if (entry.isFile() && entry.name.endsWith(extension)) {
        if (lastScanDate) {
          // Check file CREATION time (birthtime) against our last scan time
          const stats = await fsp.stat(fullPath);
          if (stats.birthtime > lastScanDate) {
            fileList.push(fullPath);
          }
        } else {
          fileList.push(fullPath);
        }
      }
    }
  } catch (e) {
    // Fail silently for directories that don't exist or lack read permissions
  }
  return fileList;
}
/**
 * Reads a file line-by-line, parsing each line as JSON.
 * It passes the parsed JSON to a custom `extractorFn`. As soon as the extractor
 * successfully finds both a sessionId and workspacePath, it closes the file and returns.
 *
 * Malformed lines are skipped individually so one corrupt row cannot abort
 * the scan of an otherwise valid session file; unreadable files yield null.
 */
export async function extractFirstValidJsonlData(
  filePath: string,
  extractorFn: (parsedJson: any) => Partial<SessionData> | null | undefined
): Promise<SessionData | null> {
  try {
    const fileStream = fs.createReadStream(filePath);
    const rl = readline.createInterface({ input: fileStream, crlfDelay: Infinity });
    for await (const line of rl) {
      if (!line.trim()) continue;
      let parsedData: any;
      try {
        parsedData = JSON.parse(line);
      } catch {
        // Skip only the corrupt row; later rows may still identify the session.
        continue;
      }
      const extracted = extractorFn(parsedData);
      // If our custom extractor found what we need, return early
      if (extracted?.sessionId && extracted?.workspacePath) {
        rl.close();
        fileStream.close();
        return extracted as SessionData;
      }
    }
  } catch (e) {
    // Ignored errors
  }
  return null;
}

View File

@@ -1 +0,0 @@

View File

@@ -1,226 +0,0 @@
import express from 'express';
import path from 'path';
import os from 'os';
import { promises as fsPromises } from 'fs';
import { sessionsDb } from '@/shared/database/repositories/sessions.db.js';
import { extractProjectDirectory } from '../../../projects.js';
import { authenticateToken } from '../auth/auth.middleware.js';
// Express sub-router holding the legacy session endpoints; mounted by the app entry point.
const router = express.Router();
// Providers whose sessions this API manages; used to validate the optional
// `provider` field on incoming requests.
const VALID_PROVIDERS = ['claude', 'codex', 'cursor', 'gemini'];
// Rename session endpoint
// PUT /api/sessions/:sessionId/rename — stores a custom display name ("summary")
// for a session after validating the ID, the summary, and the optional provider.
router.put('/api/sessions/:sessionId/rename', authenticateToken, async (req, res) => {
  try {
    const { sessionId } = req.params;
    // Strip everything outside the allow-list; any difference means the raw ID was unsafe.
    const safeSessionId = String(sessionId).replace(/[^a-zA-Z0-9._-]/g, '');
    if (!safeSessionId || safeSessionId !== String(sessionId)) {
      return res.status(400).json({ error: 'Invalid sessionId' });
    }
    const { summary, provider } = req.body;
    // Non-string or whitespace-only summaries are rejected the same way as missing ones.
    const trimmedSummary = typeof summary === 'string' ? summary.trim() : '';
    if (!trimmedSummary) {
      return res.status(400).json({ error: 'Summary is required' });
    }
    if (trimmedSummary.length > 500) {
      return res.status(400).json({ error: 'Summary must not exceed 500 characters' });
    }
    // Provider is optional, but when present it must be one we know about.
    if (provider && !VALID_PROVIDERS.includes(provider)) {
      return res.status(400).json({ error: `Provider must be one of: ${VALID_PROVIDERS.join(', ')}` });
    }
    sessionsDb.updateSessionCustomName(safeSessionId, trimmedSummary);
    res.json({ success: true });
  } catch (error) {
    console.error(`[API] Error renaming session ${req.params.sessionId}:`, error);
    res.status(500).json({ error: error.message });
  }
});
// Get token usage for a specific session
// Reads the provider's on-disk session log and reports context-window usage.
// Cursor and Gemini sessions are flagged `unsupported`; Codex and Claude usage
// is parsed from their JSONL session files.
router.get('/api/projects/:projectName/sessions/:sessionId/token-usage', authenticateToken, async (req, res) => {
  try {
    const { projectName, sessionId } = req.params;
    const { provider = 'claude' } = req.query;
    const homeDir = os.homedir();
    // Allow only safe characters in sessionId
    const safeSessionId = String(sessionId).replace(/[^a-zA-Z0-9._-]/g, '');
    if (!safeSessionId || safeSessionId !== String(sessionId)) {
      return res.status(400).json({ error: 'Invalid sessionId' });
    }
    // Handle Cursor sessions - they use SQLite and don't have token usage info
    if (provider === 'cursor') {
      return res.json({
        used: 0,
        total: 0,
        breakdown: { input: 0, cacheCreation: 0, cacheRead: 0 },
        unsupported: true,
        message: 'Token usage tracking not available for Cursor sessions'
      });
    }
    // Handle Gemini sessions - they are raw logs in our current setup
    if (provider === 'gemini') {
      return res.json({
        used: 0,
        total: 0,
        breakdown: { input: 0, cacheCreation: 0, cacheRead: 0 },
        unsupported: true,
        message: 'Token usage tracking not available for Gemini sessions'
      });
    }
    // Handle Codex sessions
    if (provider === 'codex') {
      const codexSessionsDir = path.join(homeDir, '.codex', 'sessions');
      // Find the session file by searching for the session ID
      // (session files are nested in subdirectories, so walk recursively).
      const findSessionFile = async (dir) => {
        try {
          const entries = await fsPromises.readdir(dir, { withFileTypes: true });
          for (const entry of entries) {
            const fullPath = path.join(dir, entry.name);
            if (entry.isDirectory()) {
              const found = await findSessionFile(fullPath);
              if (found) return found;
            } else if (entry.name.includes(safeSessionId) && entry.name.endsWith('.jsonl')) {
              return fullPath;
            }
          }
        } catch (error) {
          // Skip directories we can't read
        }
        return null;
      };
      const sessionFilePath = await findSessionFile(codexSessionsDir);
      if (!sessionFilePath) {
        return res.status(404).json({ error: 'Codex session file not found', sessionId: safeSessionId });
      }
      // Read and parse the Codex JSONL file
      let fileContent;
      try {
        fileContent = await fsPromises.readFile(sessionFilePath, 'utf8');
      } catch (error) {
        if (error.code === 'ENOENT') {
          return res.status(404).json({ error: 'Session file not found', path: sessionFilePath });
        }
        throw error;
      }
      const lines = fileContent.trim().split('\n');
      let totalTokens = 0;
      let contextWindow = 200000; // Default for Codex/OpenAI
      // Find the latest token_count event with info (scan from end)
      for (let i = lines.length - 1; i >= 0; i--) {
        try {
          const entry = JSON.parse(lines[i]);
          // Codex stores token info in event_msg with type: "token_count"
          if (entry.type === 'event_msg' && entry.payload?.type === 'token_count' && entry.payload?.info) {
            const tokenInfo = entry.payload.info;
            if (tokenInfo.total_token_usage) {
              totalTokens = tokenInfo.total_token_usage.total_tokens || 0;
            }
            if (tokenInfo.model_context_window) {
              contextWindow = tokenInfo.model_context_window;
            }
            break; // Stop after finding the latest token count
          }
        } catch (parseError) {
          // Skip lines that can't be parsed
          continue;
        }
      }
      return res.json({
        used: totalTokens,
        total: contextWindow
      });
    }
    // Handle Claude sessions (default)
    // Extract actual project path
    let projectPath;
    try {
      projectPath = await extractProjectDirectory(projectName);
    } catch (error) {
      console.error('Error extracting project directory:', error);
      return res.status(500).json({ error: 'Failed to determine project path' });
    }
    // Construct the JSONL file path
    // Claude stores session files in ~/.claude/projects/[encoded-project-path]/[session-id].jsonl
    // The encoding replaces any non-alphanumeric character (except -) with -
    const encodedPath = projectPath.replace(/[^a-zA-Z0-9-]/g, '-');
    const projectDir = path.join(homeDir, '.claude', 'projects', encodedPath);
    const jsonlPath = path.join(projectDir, `${safeSessionId}.jsonl`);
    // Constrain to projectDir — reject any path-traversal attempt in the resolved path.
    const rel = path.relative(path.resolve(projectDir), path.resolve(jsonlPath));
    if (rel.startsWith('..') || path.isAbsolute(rel)) {
      return res.status(400).json({ error: 'Invalid path' });
    }
    // Read and parse the JSONL file
    let fileContent;
    try {
      fileContent = await fsPromises.readFile(jsonlPath, 'utf8');
    } catch (error) {
      if (error.code === 'ENOENT') {
        return res.status(404).json({ error: 'Session file not found', path: jsonlPath });
      }
      throw error; // Re-throw other errors to be caught by outer try-catch
    }
    const lines = fileContent.trim().split('\n');
    // CONTEXT_WINDOW env var overrides the default Claude context size.
    const parsedContextWindow = parseInt(process.env.CONTEXT_WINDOW, 10);
    const contextWindow = Number.isFinite(parsedContextWindow) ? parsedContextWindow : 160000;
    let inputTokens = 0;
    let cacheCreationTokens = 0;
    let cacheReadTokens = 0;
    // Find the latest assistant message with usage data (scan from end)
    for (let i = lines.length - 1; i >= 0; i--) {
      try {
        const entry = JSON.parse(lines[i]);
        // Only count assistant messages which have usage data
        if (entry.type === 'assistant' && entry.message?.usage) {
          const usage = entry.message.usage;
          // Use token counts from latest assistant message only
          inputTokens = usage.input_tokens || 0;
          cacheCreationTokens = usage.cache_creation_input_tokens || 0;
          cacheReadTokens = usage.cache_read_input_tokens || 0;
          break; // Stop after finding the latest assistant message
        }
      } catch (parseError) {
        // Skip lines that can't be parsed
        continue;
      }
    }
    // Calculate total context usage (excluding output_tokens, as per ccusage)
    const totalUsed = inputTokens + cacheCreationTokens + cacheReadTokens;
    res.json({
      used: totalUsed,
      total: contextWindow,
      breakdown: {
        input: inputTokens,
        cacheCreation: cacheCreationTokens,
        cacheRead: cacheReadTokens
      }
    });
  } catch (error) {
    console.error('Error reading session token usage:', error);
    res.status(500).json({ error: 'Failed to read session token usage' });
  }
});
// Default export consumed by the legacy server's route registration.
export default router;

View File

@@ -1,64 +0,0 @@
import { scanStateDb } from '@/shared/database/repositories/scan-state.db.js';
import { sessionsDb } from '@/shared/database/repositories/sessions.db.js';
import { processClaudeSessions, deleteClaudeSession } from '@/modules/providers/claude/claude.session-processor.js';
import { processCodexSessions, deleteCodexSession } from '@/modules/providers/codex/codex.session-processor.js';
import { processGeminiSessions, deleteGeminiSession } from '@/modules/providers/gemini/gemini.session-processor.js';
import { processCursorSessions, deleteCursorSession } from '@/modules/providers/cursor/cursor.session-processor.js';
// Allow-list for session IDs: 1-120 chars of letters, digits, dot, underscore, dash.
const SESSION_ID_PATTERN = /^[a-zA-Z0-9._-]{1,120}$/;
/**
 * Trims a raw session ID and rejects anything outside the allow-list.
 * @throws Error('Invalid session ID format') when the value does not match.
 */
function sanitizeSessionId(sessionId: string): string {
  const candidate = String(sessionId || '').trim();
  if (SESSION_ID_PATTERN.test(candidate)) {
    return candidate;
  }
  throw new Error('Invalid session ID format');
}
/**
 * Runs every provider's session indexer concurrently, then records the scan
 * completion time. Timing and progress are logged to the console.
 */
export async function processSessions() {
  const timerLabel = 'Workspace sync total time';
  // 1. Start the timer with a unique label
  console.time(timerLabel);
  console.log('Starting workspace sync...');
  try {
    // allSettled lets the providers run concurrently and independently:
    // one failing indexer does not block the others, and boot stays fast.
    const providerRuns = [
      processClaudeSessions(),
      processCodexSessions(),
      processGeminiSessions(),
      processCursorSessions(),
    ];
    await Promise.allSettled(providerRuns);
    scanStateDb.updateLastScannedAt();
  } catch (error) {
    console.error('An error occurred during sync:', error);
  } finally {
    console.log('----------------------------------');
    // 2. Stop the timer using the exact same label
    // This will print: Workspace sync total time: 123.456ms
    console.timeEnd(timerLabel);
    console.log('Workspace synchronization complete.');
  }
}
/**
 * Removes a session's artifacts from every provider, then deletes its DB row.
 * Provider deletions run concurrently; the first rejection (if any) is
 * rethrown and the DB row is left untouched in that case.
 */
export async function deleteSession(sessionId: string): Promise<void> {
  const safeSessionId = sanitizeSessionId(sessionId);
  const sessionRow = sessionsDb.getSessionById(safeSessionId);
  // Some providers key their files by workspace or creation time, so pass
  // whatever metadata we have on record.
  const workspacePath = sessionRow?.workspace_path;
  const createdAt = sessionRow?.created_at;
  const outcomes = await Promise.allSettled([
    deleteClaudeSession(safeSessionId, workspacePath),
    deleteCodexSession(safeSessionId, createdAt),
    deleteGeminiSession(safeSessionId),
    deleteCursorSession(safeSessionId, workspacePath),
  ]);
  for (const outcome of outcomes) {
    if (outcome.status === 'rejected') {
      throw outcome.reason;
    }
  }
  sessionsDb.deleteSession(safeSessionId);
}

View File

@@ -1,184 +0,0 @@
import chokidar from "chokidar";
import path from "path";
import os from "os";
import { promises as fsPromises } from "fs";
import { logger } from "@/shared/utils/logger.js";
import { processSessions } from "@/modules/sessions/sessions.service.js";
import { processClaudeSessionFile } from "@/modules/providers/claude/claude.session-processor.js";
import { processCodexSessionFile } from "@/modules/providers/codex/codex.session-processor.js";
import { processGeminiSessionFile } from "@/modules/providers/gemini/gemini.session-processor.js";
import { processCursorSessionFile } from "@/modules/providers/cursor/cursor.session-processor.js";
import { sessionsDb } from "@/shared/database/repositories/sessions.db.js";
import { LLMProvider } from "@/shared/types/app.js";
// Live chokidar watcher handles, kept module-scoped so they stay referenced
// for the lifetime of the process.
let projectsWatchers: any[] = [];
// File system watchers for provider project/session folders
const PROVIDER_WATCH_PATHS: { provider: LLMProvider; rootPath: string }[] = [
  {
    provider: "claude",
    rootPath: path.join(os.homedir(), ".claude", "projects"),
  },
  {
    provider: "cursor",
    rootPath: path.join(os.homedir(), ".cursor", "chats")
  },
  {
    provider: "codex",
    rootPath: path.join(os.homedir(), ".codex", "sessions"),
  },
  // Gemini writes session data in two locations; both are watched.
  {
    provider: "gemini",
    rootPath: path.join(os.homedir(), ".gemini", "sessions"),
  },
  {
    provider: "gemini",
    rootPath: path.join(os.homedir(), ".gemini", "tmp"),
  },
];
// Paths excluded from watching: build output, VCS metadata, and editor
// temp/swap files would otherwise generate constant event churn.
const WATCHER_IGNORED_PATTERNS = [
  "**/node_modules/**",
  "**/.git/**",
  "**/dist/**",
  "**/build/**",
  "**/*.tmp",
  "**/*.swp",
  "**/.DS_Store",
];
// Watcher events handled by onUpdate; deletions are intentionally ignored.
type EventType = "add" | "change";
// Common shape returned by every provider's session-file processor; fields may
// be absent/null when the file does not contain a full session record yet.
type SessionFileProcessResult = {
  sessionId: string | null;
  workspacePath: string | null;
  sessionName?: string | null;
} | null | undefined;

/**
 * Handles a single watcher event for a provider session file: parses the file
 * with the provider-specific processor and upserts the session row when both
 * a session ID and workspace path were found.
 *
 * The four provider branches previously duplicated identical switch cases;
 * they are collapsed into one dispatch table with the same behavior.
 */
const onUpdate = async (
  eventType: EventType,
  filePath: string,
  provider: LLMProvider,
) => {
  try {
    console.log("[eventType] detected: ", eventType, " filePath: ", filePath, " provider: ", provider);
    if (eventType !== "add" && eventType !== "change") {
      return;
    }
    // Every processor shares the contract "parse file, return ids or null".
    const processors: Partial<
      Record<LLMProvider, (sessionFilePath: string) => Promise<SessionFileProcessResult>>
    > = {
      claude: processClaudeSessionFile,
      codex: processCodexSessionFile,
      gemini: processGeminiSessionFile,
      cursor: processCursorSessionFile,
    };
    const processor = processors[provider];
    if (!processor) {
      // Unknown provider: nothing to index (mirrors the old switch default).
      return;
    }
    const result = await processor(filePath);
    const sessionId = result?.sessionId ?? null;
    const workspacePath = result?.workspacePath ?? null;
    // Fall back to a provider-tagged placeholder when the file carries no name.
    const sessionName = result?.sessionName || `Untitled ${provider} Session`;
    if (sessionId && workspacePath) {
      let createdAt: string | undefined;
      let updatedAt: string | undefined;
      try {
        const stat = await fsPromises.stat(filePath);
        createdAt = stat.birthtime.toISOString();
        updatedAt = stat.mtime.toISOString();
      } catch {
        // Ignore stat failures and let DB defaults handle created_at/updated_at.
      }
      sessionsDb.createSession(
        sessionId,
        provider,
        workspacePath,
        sessionName,
        createdAt,
        updatedAt,
        filePath,
      );
    }
  } catch (error: any) {
    logger.error(
      `[ERROR] Failed to handle ${provider} file change for ${filePath}:`,
      error,
    );
  }
};
// Setup file system watchers for Claude, Cursor, and Codex project/session folders
/**
 * Runs one full session sync, then starts a polling chokidar watcher per
 * provider root so new/updated session files are indexed as they appear.
 * Watchers are retained in module scope (projectsWatchers) for process lifetime.
 */
export async function initializeWatcher() {
  logger.info("Setting up project watchers for providers...");
  // Index everything already on disk before listening for changes.
  await processSessions();
  for (const { provider, rootPath } of PROVIDER_WATCH_PATHS) {
    try {
      // chokidar v4 emits ENOENT via the "error" event for missing roots and will not auto-recover.
      // Ensure provider folders exist before creating the watcher so watching stays active.
      await fsPromises.mkdir(rootPath, { recursive: true });
      logger.info(`Setting up watcher for ${provider} at: ${rootPath}`);
      const watcher = chokidar.watch(rootPath, {
        ignored: WATCHER_IGNORED_PATTERNS,
        persistent: true,
        ignoreInitial: true, // Don't fire events for existing files on startup
        followSymlinks: false,
        depth: 6, // Reasonable depth limit
        usePolling: true, // Use polling to fix Windows fs.watch buffering/batching issues. It now stops relying on the OS's native file-system events and instead manually checks the files for changes at a set interval.
        interval: 2000, // Poll every 2000ms
        binaryInterval: 6000, // We set a high amount because checking large binary files for changes using polling is much more CPU-intensive than checking small text files.
        // Removed awaitWriteFinish to prevent delays when LLM streams to the file
      });
      // Set up event listeners
      watcher
        .on("add", (filePath) => onUpdate("add", filePath, provider))
        .on("change", (filePath) =>
          onUpdate("change", filePath, provider),
        )
        .on("error", (error: any) => {
          // Log and keep the remaining watchers alive; one bad root should not kill them all.
          logger.error(`[ERROR] ${provider} watcher error: ${error.message}`);
        })
        .on("ready", () => { });
      projectsWatchers.push(watcher);
    } catch (error: any) {
      logger.error(
        `[ERROR] Failed to setup ${provider} watcher for ${rootPath}:`,
        error,
      );
    }
  }
}

View File

@@ -1,171 +0,0 @@
import express, { type Request, type Response } from 'express';
import { authenticateToken } from '@/modules/auth/auth.middleware.js';
import {
deleteSessionById,
deleteWorkspaceByPath,
getWorkspaceSessionsCollection,
updateSessionNameById,
updateWorkspaceNameByPath,
updateWorkspaceStarByPath,
} from '@/modules/sidebar/sidebar.service.js';
// Express sub-router carrying all sidebar endpoints; mounted by the app entry point.
const router = express.Router();
/** Returns the trimmed string value, or '' when the input is not a string. */
const getTrimmedString = (value: unknown): string =>
  typeof value === 'string' ? value.trim() : '';

/** Reads and trims `workspacePath` from a request body ('' when absent). */
const getWorkspacePathFromBody = (req: Request): string =>
  getTrimmedString(req.body?.workspacePath);
// GET: full workspace/session tree rendered in the sidebar.
router.get(
  '/api/sidebar/get-workspaces-sessions',
  authenticateToken,
  async (_req: Request, res: Response): Promise<void> => {
    try {
      const workspaces = getWorkspaceSessionsCollection();
      res.json({ workspaces });
    } catch (error) {
      const message = error instanceof Error ? error.message : 'Failed to fetch workspaces';
      res.status(500).json({ error: message });
    }
  },
);
// PUT: toggle the starred flag on a workspace; responds with the new value.
router.put(
  '/api/sidebar/update-workspace-star',
  authenticateToken,
  async (req: Request, res: Response): Promise<void> => {
    try {
      const workspacePath = getWorkspacePathFromBody(req);
      if (!workspacePath) {
        res.status(400).json({ error: 'workspacePath is required' });
        return;
      }
      const isStarred = updateWorkspaceStarByPath(workspacePath);
      res.json({ success: true, workspacePath, isStarred });
    } catch (error) {
      const message = error instanceof Error ? error.message : 'Failed to update workspace star';
      // Service signals a missing workspace via its error message.
      const statusCode = message.toLowerCase().includes('not found') ? 404 : 500;
      res.status(statusCode).json({ error: message });
    }
  },
);
// PUT: set or clear (empty body value) a workspace's custom display name.
router.put(
  '/api/sidebar/update-workspace-custom-name',
  authenticateToken,
  async (req: Request, res: Response): Promise<void> => {
    try {
      const workspacePath = getWorkspacePathFromBody(req);
      if (!workspacePath) {
        res.status(400).json({ error: 'workspacePath is required' });
        return;
      }
      const customWorkspaceName = getTrimmedString(req.body?.workspaceCustomName);
      // Blank names are stored as null, i.e. "no custom name".
      updateWorkspaceNameByPath(workspacePath, customWorkspaceName || null);
      res.json({ success: true, workspacePath, workspaceCustomName: customWorkspaceName || null });
    } catch (error) {
      const message = error instanceof Error ? error.message : 'Failed to update workspace name';
      res.status(500).json({ error: message });
    }
  },
);
// PUT: set a session's custom display name (required, max 500 chars).
router.put(
  '/api/sidebar/update-session-custom-name',
  authenticateToken,
  async (req: Request, res: Response): Promise<void> => {
    try {
      const sessionId = getTrimmedString(req.body?.sessionId);
      const sessionCustomName = getTrimmedString(req.body?.sessionCustomName);
      if (!sessionId) {
        res.status(400).json({ error: 'sessionId is required' });
        return;
      }
      if (!sessionCustomName) {
        res.status(400).json({ error: 'sessionCustomName is required' });
        return;
      }
      if (sessionCustomName.length > 500) {
        res
          .status(400)
          .json({ error: 'sessionCustomName must not exceed 500 characters' });
        return;
      }
      updateSessionNameById(sessionId, sessionCustomName);
      res.json({ success: true, sessionId, sessionCustomName });
    } catch (error) {
      const message =
        error instanceof Error ? error.message : 'Failed to update session name';
      // Service signals a missing session via its error message.
      const statusCode = message.toLowerCase().includes('not found') ? 404 : 500;
      res.status(statusCode).json({ error: message });
    }
  },
);
// DELETE: remove a workspace with all of its sessions and session files.
router.delete(
  '/api/sidebar/delete-workspace',
  authenticateToken,
  async (req: Request, res: Response): Promise<void> => {
    try {
      const workspacePath = getWorkspacePathFromBody(req);
      if (!workspacePath) {
        res.status(400).json({ error: 'workspacePath is required' });
        return;
      }
      const result = await deleteWorkspaceByPath(workspacePath);
      res.json({
        success: true,
        workspacePath,
        ...result,
      });
    } catch (error) {
      const message = error instanceof Error ? error.message : 'Failed to delete workspace';
      res.status(500).json({ error: message });
    }
  },
);
// DELETE: remove a single session (provider file + DB row).
router.delete(
  '/api/sidebar/delete-session',
  authenticateToken,
  async (req: Request, res: Response): Promise<void> => {
    try {
      const sessionId = getTrimmedString(req.body?.sessionId);
      if (!sessionId) {
        res.status(400).json({ error: 'sessionId is required' });
        return;
      }
      const result = await deleteSessionById(sessionId);
      if (!result.deleted) {
        res.status(404).json({ error: 'Session not found' });
        return;
      }
      res.json({
        success: true,
        sessionId,
        ...result,
      });
    } catch (error) {
      const message = error instanceof Error ? error.message : 'Failed to delete session';
      res.status(500).json({ error: message });
    }
  },
);
// Default export consumed by the app's route registration.
export default router;

View File

@@ -1,247 +0,0 @@
import path from 'node:path';
import { deleteClaudeSession } from '@/modules/providers/claude/claude.session-processor.js';
import { deleteCodexSession } from '@/modules/providers/codex/codex.session-processor.js';
import { deleteCursorSession } from '@/modules/providers/cursor/cursor.session-processor.js';
import { deleteGeminiSession } from '@/modules/providers/gemini/gemini.session-processor.js';
import { sessionsDb } from '@/shared/database/repositories/sessions.db.js';
import { workspaceOriginalPathsDb } from '@/shared/database/repositories/workspace-original-paths.db.js';
import type { SessionsRow } from '@/shared/database/types.js';
// One session entry as rendered in the sidebar tree.
export type SidebarSessionRecord = {
  sessionId: string;
  // Duplicate of sessionId kept for UI components that expect an `id` field.
  id: string;
  provider: SessionsRow['provider'];
  customName: string | null;
  // Display name: the custom name, or a generic fallback when none is set.
  summary: string;
  workspacePath: string;
  createdAt: string | null;
  updatedAt: string | null;
  // Most recent of updatedAt/createdAt; drives sort order.
  lastActivity: string | null;
};
// One workspace entry with its sessions, as returned to the sidebar.
export type SidebarWorkspaceRecord = {
  workspaceOriginalPath: string;
  workspaceCustomName: string | null;
  // Custom name, else the path basename, else the raw path.
  workspaceDisplayName: string;
  isStarred: boolean;
  // Latest session activity in this workspace (null when it has no sessions).
  lastActivity: string | null;
  sessions: SidebarSessionRecord[];
};
// Outcome of deleting a single session.
export type DeleteSessionResult = {
  deleted: boolean;
  // Whether the provider's on-disk session file was removed.
  jsonlDeleted: boolean;
};
// Outcome of deleting a workspace and all of its sessions.
export type DeleteWorkspaceResult = {
  deletedWorkspace: boolean;
  deletedSessionCount: number;
  jsonlDeletedCount: number;
  // Session IDs whose on-disk file deletion threw (DB rows are still removed).
  failedSessionFileDeletes: string[];
};
// Minimal slice of a sessions row needed to route a file deletion to its provider.
type SessionDeletionTarget = Pick<SessionsRow, 'session_id' | 'provider' | 'workspace_path' | 'created_at'>;
/**
 * Converts a stored timestamp into epoch milliseconds for sorting.
 * Missing or unparseable values collapse to 0 so they sort last.
 */
const parseTimestamp = (timestamp: string | null | undefined): number => {
  if (!timestamp) {
    return 0;
  }
  // SQLite CURRENT_TIMESTAMP is UTC but stored without timezone ("YYYY-MM-DD HH:MM:SS").
  // Normalize this format so parsing is always timezone-correct.
  const isSqliteUtcFormat = /^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$/.test(timestamp);
  const isoCandidate = isSqliteUtcFormat ? `${timestamp.replace(' ', 'T')}Z` : timestamp;
  const epochMillis = new Date(isoCandidate).getTime();
  return Number.isFinite(epochMillis) ? epochMillis : 0;
};
/**
 * Maps a raw sessions-table row to the sidebar API shape.
 * `lastActivity` prefers updated_at, falls back to created_at, else null.
 */
const toSidebarSessionRecord = (session: SessionsRow): SidebarSessionRecord => ({
  sessionId: session.session_id,
  id: session.session_id,
  provider: session.provider,
  customName: session.custom_name,
  summary: session.custom_name || 'Untitled Session',
  workspacePath: session.workspace_path,
  createdAt: session.created_at || null,
  updatedAt: session.updated_at || null,
  lastActivity: session.updated_at || session.created_at || null,
});
/**
 * Returns a new array sorted by lastActivity (newest first). Ties fall back to
 * descending sessionId so ordering stays deterministic; input is not mutated.
 */
const sortSessionsByLastActivity = (sessions: SidebarSessionRecord[]): SidebarSessionRecord[] =>
  sessions.slice().sort((a, b) => {
    const byActivity = parseTimestamp(b.lastActivity) - parseTimestamp(a.lastActivity);
    return byActivity !== 0 ? byActivity : b.sessionId.localeCompare(a.sessionId);
  });
/**
 * Returns a new array of workspaces sorted by lastActivity (newest first).
 * Ties fall back to ascending display name; input is not mutated.
 */
const sortWorkspacesByLastActivity = (
  workspaces: SidebarWorkspaceRecord[],
): SidebarWorkspaceRecord[] =>
  workspaces.slice().sort((a, b) => {
    const byActivity = parseTimestamp(b.lastActivity) - parseTimestamp(a.lastActivity);
    return byActivity !== 0 ? byActivity : a.workspaceDisplayName.localeCompare(b.workspaceDisplayName);
  });
/**
 * Dispatches on-disk session-file deletion to the matching provider
 * implementation. Resolves true when a file was removed; unknown providers
 * resolve false.
 */
const deleteSessionFileByProvider = async (
  session: SessionDeletionTarget,
): Promise<boolean> => {
  const { session_id: id, provider, workspace_path: workspacePath, created_at: createdAt } = session;
  if (provider === 'claude') {
    return deleteClaudeSession(id, workspacePath);
  }
  if (provider === 'codex') {
    return deleteCodexSession(id, createdAt);
  }
  if (provider === 'cursor') {
    return deleteCursorSession(id, workspacePath);
  }
  if (provider === 'gemini') {
    return deleteGeminiSession(id);
  }
  return false;
};
/**
 * Builds the sidebar payload: every known workspace with its sessions,
 * sessions sorted newest-first within each workspace, and workspaces sorted
 * by their most recent activity.
 */
export const getWorkspaceSessionsCollection = (): SidebarWorkspaceRecord[] => {
  const workspaceRows = workspaceOriginalPathsDb.getWorkspacePaths();
  const sessionRows = sessionsDb.getAllSessions();
  const sessionsByWorkspace = new Map<string, SidebarSessionRecord[]>();
  // Build grouped sessions once to keep the response shape deterministic.
  for (const sessionRow of sessionRows) {
    const existing = sessionsByWorkspace.get(sessionRow.workspace_path) || [];
    existing.push(toSidebarSessionRecord(sessionRow));
    sessionsByWorkspace.set(sessionRow.workspace_path, existing);
  }
  const workspaceRecords = workspaceRows.map((workspaceRow) => {
    const sessions = sortSessionsByLastActivity(
      sessionsByWorkspace.get(workspaceRow.workspace_path) || [],
    );
    // Sessions are sorted newest-first, so index 0 carries the latest activity.
    const lastActivity = sessions[0]?.lastActivity || null;
    return {
      workspaceOriginalPath: workspaceRow.workspace_path,
      workspaceCustomName: workspaceRow.custom_workspace_name,
      // Prefer the user-chosen name, then the folder basename, then the raw path.
      workspaceDisplayName:
        workspaceRow.custom_workspace_name ||
        path.basename(workspaceRow.workspace_path) ||
        workspaceRow.workspace_path,
      isStarred: workspaceRow.isStarred === 1,
      lastActivity,
      sessions,
    };
  });
  return sortWorkspacesByLastActivity(workspaceRecords);
};
/**
 * Toggles the starred flag on a workspace and returns the new value.
 * @throws Error('Workspace not found') when the path is not registered.
 */
export const updateWorkspaceStarByPath = (workspacePath: string): boolean => {
  const workspaceRow = workspaceOriginalPathsDb.getWorkspacePath(workspacePath);
  if (!workspaceRow) {
    throw new Error('Workspace not found');
  }
  // isStarred is stored as 0/1 in SQLite; flip it.
  const toggledStar = workspaceRow.isStarred !== 1;
  workspaceOriginalPathsDb.updateWorkspaceIsStarred(workspacePath, toggledStar);
  return toggledStar;
};
/**
 * Sets a workspace's custom display name; pass null to clear it.
 * Thin pass-through to the repository layer.
 */
export const updateWorkspaceNameByPath = (
  workspacePath: string,
  workspaceCustomName: string | null,
): void => {
  workspaceOriginalPathsDb.updateCustomWorkspaceName(workspacePath, workspaceCustomName);
};
/**
 * Sets a session's custom display name.
 * @throws Error('Session not found') when the session is not indexed.
 */
export const updateSessionNameById = (
  sessionId: string,
  sessionCustomName: string,
): void => {
  if (!sessionsDb.getSessionById(sessionId)) {
    throw new Error('Session not found');
  }
  sessionsDb.updateSessionCustomName(sessionId, sessionCustomName);
};
/**
 * Deletes one session: provider file first, DB row second.
 * Returns { deleted: false } when the session is unknown.
 */
export const deleteSessionById = async (
  sessionId: string,
): Promise<DeleteSessionResult> => {
  const sessionMetadata = sessionsDb.getSessionById(sessionId);
  if (!sessionMetadata) {
    return { deleted: false, jsonlDeleted: false };
  }
  // Narrow the row to exactly the fields the provider dispatch needs.
  const jsonlDeleted = await deleteSessionFileByProvider({
    session_id: sessionMetadata.session_id,
    provider: sessionMetadata.provider,
    workspace_path: sessionMetadata.workspace_path,
    created_at: sessionMetadata.created_at,
  });
  sessionsDb.deleteSession(sessionId);
  return { deleted: true, jsonlDeleted };
};
/**
 * Deletes a workspace: removes every session file (best effort), always drops
 * the session DB rows, then removes the workspace record itself.
 * Session IDs whose file deletion threw are reported back to the caller.
 */
export const deleteWorkspaceByPath = async (
  workspacePath: string,
): Promise<DeleteWorkspaceResult> => {
  const sessionRows = sessionsDb.getSessionsByWorkspacePath(workspacePath);
  const failedSessionFileDeletes: string[] = [];
  let jsonlDeletedCount = 0;
  // Remove all session files first, then clean up DB rows.
  for (const sessionRow of sessionRows) {
    try {
      const deleted = await deleteSessionFileByProvider({
        session_id: sessionRow.session_id,
        provider: sessionRow.provider,
        workspace_path: sessionRow.workspace_path,
        created_at: sessionRow.created_at,
      });
      if (deleted) {
        jsonlDeletedCount += 1;
      }
    } catch {
      // Record the failure but keep going; remaining sessions should still be removed.
      failedSessionFileDeletes.push(sessionRow.session_id);
    } finally {
      // The DB row is removed even when the file deletion failed.
      sessionsDb.deleteSession(sessionRow.session_id);
    }
  }
  workspaceOriginalPathsDb.deleteWorkspacePath(workspacePath);
  return {
    deletedWorkspace: true,
    deletedSessionCount: sessionRows.length,
    jsonlDeletedCount,
    failedSessionFileDeletes,
  };
};

View File

@@ -0,0 +1,97 @@
import express, { type NextFunction, type Request, type Response } from 'express';
import { asyncHandler } from '@/shared/http/async-handler.js';
import { AppError } from '@/shared/utils/app-error.js';
import { createApiErrorResponse, createApiSuccessResponse } from '@/shared/http/api-response.js';
import { logger } from '@/shared/utils/logger.js';
import { workspaceService } from '@/modules/workspaces/workspaces.service.js';
// Express sub-router for the workspace catalog endpoints; mounted by the app entry point.
const router = express.Router();
/** Returns the trimmed string value, or '' when the input is not a string. */
const getTrimmedString = (value: unknown): string =>
  typeof value === 'string' ? value.trim() : '';
/**
 * Extracts the required `workspacePath` field from a request body.
 * @throws AppError (400, WORKSPACE_PATH_REQUIRED) when missing or blank.
 */
const parseWorkspacePathFromBody = (req: Request): string => {
  const body = req.body as Record<string, unknown> | undefined;
  const workspacePath = getTrimmedString(body?.workspacePath);
  if (workspacePath) {
    return workspacePath;
  }
  throw new AppError('workspacePath is required.', {
    code: 'WORKSPACE_PATH_REQUIRED',
    statusCode: 400,
  });
};
/** Reads the optional `workspaceCustomName` field; blank/absent values become null. */
const parseWorkspaceCustomNameFromBody = (req: Request): string | null => {
  const body = req.body as Record<string, unknown> | undefined;
  const trimmedName = getTrimmedString(body?.workspaceCustomName);
  return trimmedName === '' ? null : trimmedName;
};
// GET /: full workspace catalog with nested sessions.
router.get(
  '/',
  asyncHandler(async (_req: Request, res: Response) => {
    const workspaces = workspaceService.listWorkspaces();
    res.json(createApiSuccessResponse({ workspaces }));
  }),
);
// PATCH /star: toggle a workspace's starred flag; responds with the new value.
router.patch(
  '/star',
  asyncHandler(async (req: Request, res: Response) => {
    const workspacePath = parseWorkspacePathFromBody(req);
    const isStarred = workspaceService.toggleWorkspaceStar(workspacePath);
    res.json(createApiSuccessResponse({ workspacePath, isStarred }));
  }),
);
// PATCH /name: set or clear (null) a workspace's custom display name.
router.patch(
  '/name',
  asyncHandler(async (req: Request, res: Response) => {
    const workspacePath = parseWorkspacePathFromBody(req);
    const workspaceCustomName = parseWorkspaceCustomNameFromBody(req);
    workspaceService.updateWorkspaceCustomName(workspacePath, workspaceCustomName);
    res.json(createApiSuccessResponse({ workspacePath, workspaceCustomName }));
  }),
);
// DELETE /: remove a workspace and all of its sessions.
router.delete(
  '/',
  asyncHandler(async (req: Request, res: Response) => {
    const workspacePath = parseWorkspacePathFromBody(req);
    const result = await workspaceService.deleteWorkspace(workspacePath);
    res.json(createApiSuccessResponse(result));
  }),
);
/**
 * Normalizes route-level failures to a consistent JSON API shape.
 * AppError instances keep their status code and error code; anything else
 * is logged and reported as a generic 500 INTERNAL_ERROR.
 */
router.use((error: unknown, _req: Request, res: Response, _next: NextFunction) => {
  // A handler may have already streamed a response; don't double-send.
  if (res.headersSent) {
    return;
  }
  if (error instanceof AppError) {
    res
      .status(error.statusCode)
      .json(createApiErrorResponse(error.code, error.message, undefined, error.details));
    return;
  }
  const message = error instanceof Error ? error.message : 'Unexpected workspaces route failure.';
  logger.error(message, {
    module: 'workspaces.routes',
  });
  res.status(500).json(createApiErrorResponse('INTERNAL_ERROR', message));
});
// Default export consumed by the app's route registration.
export default router;

View File

@@ -0,0 +1,188 @@
import path from 'node:path';
import { llmSessionsService } from '@/modules/llm/sessions.service.js';
import { sessionsDb } from '@/shared/database/repositories/sessions.db.js';
import { workspaceOriginalPathsDb } from '@/shared/database/repositories/workspace-original-paths.db.js';
import type { SessionsRow } from '@/shared/database/types.js';
import { AppError } from '@/shared/utils/app-error.js';
// One session entry inside a workspace catalog response.
export type WorkspaceSessionRecord = {
  sessionId: string;
  // Duplicate of sessionId kept for UI components that expect an `id` field.
  id: string;
  provider: SessionsRow['provider'];
  customName: string | null;
  // Display name: the custom name, or a generic fallback when none is set.
  summary: string;
  workspacePath: string;
  createdAt: string | null;
  updatedAt: string | null;
  // Most recent of updatedAt/createdAt; drives sort order.
  lastActivity: string | null;
};
// One workspace entry with its sessions, as returned by the catalog service.
export type WorkspaceRecord = {
  workspaceOriginalPath: string;
  workspaceCustomName: string | null;
  // Custom name, else the path basename, else the raw path.
  workspaceDisplayName: string;
  isStarred: boolean;
  // Latest session activity in this workspace (null when it has no sessions).
  lastActivity: string | null;
  sessions: WorkspaceSessionRecord[];
};
/**
 * Converts a stored timestamp into epoch milliseconds for sorting.
 * Missing or unparseable values collapse to 0 so they sort last.
 */
const parseTimestamp = (timestamp: string | null | undefined): number => {
  if (!timestamp) {
    return 0;
  }
  // SQLite CURRENT_TIMESTAMP is UTC but stored without timezone ("YYYY-MM-DD HH:MM:SS").
  // Normalize this format so parsing is always timezone-correct.
  const isSqliteUtcFormat = /^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$/.test(timestamp);
  const isoCandidate = isSqliteUtcFormat ? `${timestamp.replace(' ', 'T')}Z` : timestamp;
  const epochMillis = new Date(isoCandidate).getTime();
  return Number.isFinite(epochMillis) ? epochMillis : 0;
};
/**
 * Maps a raw sessions-table row to the workspace catalog shape.
 * `lastActivity` prefers updated_at, falls back to created_at, else null.
 */
const toWorkspaceSessionRecord = (session: SessionsRow): WorkspaceSessionRecord => ({
  sessionId: session.session_id,
  id: session.session_id,
  provider: session.provider,
  customName: session.custom_name,
  summary: session.custom_name || 'Untitled Session',
  workspacePath: session.workspace_path,
  createdAt: session.created_at || null,
  updatedAt: session.updated_at || null,
  lastActivity: session.updated_at || session.created_at || null,
});
/**
 * Returns a new array sorted by lastActivity (newest first). Ties fall back to
 * descending sessionId so ordering stays deterministic; input is not mutated.
 */
const sortSessionsByLastActivity = (sessions: WorkspaceSessionRecord[]): WorkspaceSessionRecord[] =>
  sessions.slice().sort((a, b) => {
    const byActivity = parseTimestamp(b.lastActivity) - parseTimestamp(a.lastActivity);
    return byActivity !== 0 ? byActivity : b.sessionId.localeCompare(a.sessionId);
  });
/**
 * Returns a new array of workspaces sorted by lastActivity (newest first).
 * Ties fall back to ascending display name; input is not mutated.
 */
const sortWorkspacesByLastActivity = (
  workspaces: WorkspaceRecord[],
): WorkspaceRecord[] =>
  workspaces.slice().sort((a, b) => {
    const byActivity = parseTimestamp(b.lastActivity) - parseTimestamp(a.lastActivity);
    return byActivity !== 0 ? byActivity : a.workspaceDisplayName.localeCompare(b.workspaceDisplayName);
  });
/**
 * Groups indexed sessions by workspace and returns a deterministic catalog shape.
 * Sessions are sorted newest-first within each workspace, and workspaces are
 * sorted by their most recent activity.
 */
const buildWorkspaceSessionCollection = (): WorkspaceRecord[] => {
  const workspaceRows = workspaceOriginalPathsDb.getWorkspacePaths();
  const sessionRows = sessionsDb.getAllSessions();
  const sessionsByWorkspace = new Map<string, WorkspaceSessionRecord[]>();
  // Build grouped sessions once to keep the response shape deterministic.
  for (const sessionRow of sessionRows) {
    const existing = sessionsByWorkspace.get(sessionRow.workspace_path) || [];
    existing.push(toWorkspaceSessionRecord(sessionRow));
    sessionsByWorkspace.set(sessionRow.workspace_path, existing);
  }
  const workspaceRecords = workspaceRows.map((workspaceRow) => {
    const sessions = sortSessionsByLastActivity(
      sessionsByWorkspace.get(workspaceRow.workspace_path) || [],
    );
    // Sessions are sorted newest-first, so index 0 carries the latest activity.
    const lastActivity = sessions[0]?.lastActivity || null;
    return {
      workspaceOriginalPath: workspaceRow.workspace_path,
      workspaceCustomName: workspaceRow.custom_workspace_name,
      // Prefer the user-chosen name, then the folder basename, then the raw path.
      workspaceDisplayName:
        workspaceRow.custom_workspace_name ||
        path.basename(workspaceRow.workspace_path) ||
        workspaceRow.workspace_path,
      isStarred: workspaceRow.isStarred === 1,
      lastActivity,
      sessions,
    };
  });
  return sortWorkspacesByLastActivity(workspaceRecords);
};
/**
 * Workspace catalog facade consumed by HTTP routes.
 */
export const workspaceService = {
  /** Returns every known workspace with its grouped, sorted sessions. */
  listWorkspaces(): WorkspaceRecord[] {
    return buildWorkspaceSessionCollection();
  },
  /**
   * Flips the starred flag for a workspace and returns the new value.
   * Throws a 404 AppError when the workspace path is unknown.
   */
  toggleWorkspaceStar(workspacePath: string): boolean {
    const existingRow = workspaceOriginalPathsDb.getWorkspacePath(workspacePath);
    if (!existingRow) {
      throw new AppError('Workspace not found.', {
        code: 'WORKSPACE_NOT_FOUND',
        statusCode: 404,
      });
    }
    const toggledValue = existingRow.isStarred !== 1;
    workspaceOriginalPathsDb.updateWorkspaceIsStarred(workspacePath, toggledValue);
    return toggledValue;
  },
  /** Persists (or clears, when null) the custom display name for a workspace. */
  updateWorkspaceCustomName(workspacePath: string, workspaceCustomName: string | null): void {
    workspaceOriginalPathsDb.updateCustomWorkspaceName(workspacePath, workspaceCustomName);
  },
  /**
   * Deletes a workspace: removes each session's artifacts first, then drops
   * the workspace row. A failed per-session delete is recorded rather than
   * aborting the whole operation, so the result reports partial failures.
   */
  async deleteWorkspace(workspacePath: string): Promise<{
    workspacePath: string;
    deletedWorkspace: boolean;
    deletedSessionCount: number;
    jsonlDeletedCount: number;
    failedSessionFileDeletes: string[];
  }> {
    const sessionRows = sessionsDb.getSessionsByWorkspacePath(workspacePath);
    const failedSessionFileDeletes: string[] = [];
    let jsonlDeletedCount = 0;
    // Remove session files sequentially before touching the workspace row.
    for (const { session_id } of sessionRows) {
      try {
        const deletionResult = await llmSessionsService.deleteSessionArtifacts(session_id);
        if (deletionResult.deletedFromDisk) {
          jsonlDeletedCount += 1;
        }
      } catch {
        failedSessionFileDeletes.push(session_id);
      }
    }
    workspaceOriginalPathsDb.deleteWorkspacePath(workspacePath);
    return {
      workspacePath,
      deletedWorkspace: true,
      deletedSessionCount: sessionRows.length,
      jsonlDeletedCount,
      failedSessionFileDeletes,
    };
  },
};

View File

@@ -8,7 +8,7 @@ import { dirname } from 'path';
import { fileURLToPath } from 'url';
import { initializeDatabase } from '@/shared/database/init-db.js';
import { initializeWatcher } from '@/modules/sessions/sessions.watcher.js';
import { initializeWatcher } from '@/modules/llm/sessions.watcher.js';
import { configureWebPush } from '@/modules/push-sub/push-sub.services.js';
import { getConnectableHost } from '@/shared/utils/networkHosts.js';
import { logger } from '@/shared/utils/logger.js';
@@ -61,10 +61,11 @@ const [
geminiRoutes,
pluginsRoutes,
messagesRoutes,
sidebarRoutes,
conversationsRoutes,
workspacesRoutes,
projectsInlineRoutes,
filesRoutes,
sessionsInlineRoutes,
llmRoutes,
] = await Promise.all([
importRoute('./modules/health/health.routes.js'),
importRoute('./modules/system/system.routes.js'),
@@ -86,10 +87,11 @@ const [
importRoute('./modules/gemini/gemini.routes.js'),
importRoute('./modules/plugins/plugins.routes.js'),
importRoute('./modules/messages/messages.routes.js'),
importRoute('./modules/sidebar/sidebar.routes.js'),
importRoute('./modules/conversations/conversations.routes.js'),
importRoute('./modules/workspaces/workspaces.routes.js'),
importRoute('./modules/projects/projects.inline.routes.js'),
importRoute('./modules/files/files.routes.js'),
importRoute('./modules/sessions/sessions.inline.routes.js'),
importRoute('./modules/llm/llm.routes.js'),
]);
// ---------- MIDDLEWARES ----------------
@@ -176,8 +178,14 @@ app.use('/api/plugins', authenticateToken, pluginsRoutes);
// Unified session messages route (protected)
app.use('/api/sessions', authenticateToken, messagesRoutes);
// Refactored sidebar routes (protected)
app.use(sidebarRoutes);
// Conversation search routes (protected)
app.use('/api/conversations', authenticateToken, conversationsRoutes);
// Workspace catalog routes (protected)
app.use('/api/workspaces', authenticateToken, workspacesRoutes);
// Unified LLM provider API routes (protected)
app.use('/api/llm', authenticateToken, llmRoutes);
// Agent API Routes (uses API key authentication)
app.use('/api/agent', agentRoutes);
@@ -186,7 +194,6 @@ app.use('/api/agent', agentRoutes);
app.use(systemRoutes);
app.use(projectsInlineRoutes);
app.use(filesRoutes);
app.use(sessionsInlineRoutes);
// This matches files found in the root public folder (like api-docs.html when we run `/api-docs.html`).
// If the file is found, it's automatically sent. If it is not, it passes it to the next route checker.