mirror of
https://github.com/siteboon/claudecodeui.git
synced 2026-05-16 09:13:36 +00:00
feat: setup unified classes for LLM providers and session processing, add tests for LLM unifier helper functions
This commit is contained in:
338
server/src/modules/llm/llm-unifier.providers.test.ts
Normal file
338
server/src/modules/llm/llm-unifier.providers.test.ts
Normal file
@@ -0,0 +1,338 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import fs from 'node:fs/promises';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
import test from 'node:test';
|
||||
|
||||
import { AppError } from '../../shared/utils/app-error.js';
|
||||
import { llmService } from './llm.service.js';
|
||||
import { CursorProvider } from './providers/cursor.provider.js';
|
||||
import { GeminiProvider } from './providers/gemini.provider.js';
|
||||
import { CodexProvider } from './providers/codex.provider.js';
|
||||
import { ClaudeProvider } from './providers/claude.provider.js';
|
||||
|
||||
const asyncEvents = async function* (events: unknown[]) {
|
||||
for (const event of events) {
|
||||
yield event;
|
||||
}
|
||||
};
|
||||
|
||||
// This test covers Cursor start/resume command construction, including yolo/model/resume flags.
|
||||
test('cursor provider builds start/resume CLI invocations correctly', () => {
|
||||
const provider = new CursorProvider() as any;
|
||||
|
||||
const start = provider.createCliInvocation({
|
||||
prompt: 'build feature',
|
||||
sessionId: 'cursor-session-1',
|
||||
isResume: false,
|
||||
model: 'composer-2',
|
||||
allowYolo: true,
|
||||
workspacePath: '/tmp/workspace',
|
||||
});
|
||||
assert.equal(start.command, 'cursor-agent');
|
||||
assert.deepEqual(start.args, [
|
||||
'--print',
|
||||
'--trust',
|
||||
'--output-format',
|
||||
'stream-json',
|
||||
'--yolo',
|
||||
'--model',
|
||||
'composer-2',
|
||||
'build feature',
|
||||
]);
|
||||
|
||||
const resume = provider.createCliInvocation({
|
||||
prompt: 'continue',
|
||||
sessionId: 'cursor-session-1',
|
||||
isResume: true,
|
||||
workspacePath: '/tmp/workspace',
|
||||
});
|
||||
assert.equal(resume.command, 'cursor-agent');
|
||||
assert.deepEqual(resume.args, [
|
||||
'--print',
|
||||
'--trust',
|
||||
'--output-format',
|
||||
'stream-json',
|
||||
'--resume',
|
||||
'cursor-session-1',
|
||||
'continue',
|
||||
]);
|
||||
});
|
||||
|
||||
// This test covers Cursor model-list parsing, including ANSI stripping and current/default flags.
|
||||
test('cursor provider parses model list output into normalized models', async () => {
|
||||
const provider = new CursorProvider() as any;
|
||||
|
||||
provider.runCommandForOutput = async () => [
|
||||
'\u001b[32mAvailable models\u001b[0m',
|
||||
'auto - Auto (current)',
|
||||
'composer-2-fast - Composer 2 Fast (default)',
|
||||
'Tip: use --model',
|
||||
].join('\n');
|
||||
|
||||
const models = await provider.listModels();
|
||||
assert.equal(models.length, 2);
|
||||
assert.deepEqual(models[0], {
|
||||
value: 'auto',
|
||||
displayName: 'auto',
|
||||
description: 'Auto',
|
||||
current: true,
|
||||
default: false,
|
||||
supportsThinkingModes: false,
|
||||
supportedThinkingModes: [],
|
||||
});
|
||||
assert.equal(models[1].value, 'composer-2-fast');
|
||||
assert.equal(models[1].default, true);
|
||||
});
|
||||
|
||||
// This test covers Gemini start/resume CLI construction and curated model list contract.
|
||||
test('gemini provider builds start/resume CLI invocations and exposes curated models', async () => {
|
||||
const provider = new GeminiProvider() as any;
|
||||
|
||||
const start = provider.createCliInvocation({
|
||||
prompt: 'explain architecture',
|
||||
sessionId: 'gemini-session-1',
|
||||
isResume: false,
|
||||
model: 'gemini-2.5-pro',
|
||||
workspacePath: '/tmp/workspace',
|
||||
});
|
||||
assert.equal(start.command, 'gemini');
|
||||
assert.deepEqual(start.args, [
|
||||
'--prompt',
|
||||
'explain architecture',
|
||||
'--output-format',
|
||||
'stream-json',
|
||||
'--model',
|
||||
'gemini-2.5-pro',
|
||||
]);
|
||||
|
||||
const resume = provider.createCliInvocation({
|
||||
prompt: 'continue',
|
||||
sessionId: 'gemini-session-1',
|
||||
isResume: true,
|
||||
workspacePath: '/tmp/workspace',
|
||||
});
|
||||
assert.deepEqual(resume.args, [
|
||||
'--prompt',
|
||||
'continue',
|
||||
'--output-format',
|
||||
'stream-json',
|
||||
'--resume',
|
||||
'gemini-session-1',
|
||||
]);
|
||||
|
||||
const models = await provider.listModels();
|
||||
assert.ok(models.some((model: { value?: string }) => model.value === 'gemini-2.5-pro'));
|
||||
});
|
||||
|
||||
// This test covers Codex start/resume behavior and abort-controller based stop behavior.
|
||||
test('codex provider start/resume use correct SDK thread methods and stop aborts signal', async () => {
|
||||
const provider = new CodexProvider() as any;
|
||||
|
||||
const calls: Array<{ fn: 'start' | 'resume'; sessionId?: string; options: Record<string, unknown> }> = [];
|
||||
let capturedSignal: AbortSignal | undefined;
|
||||
|
||||
const fakeThread = {
|
||||
async runStreamed(_prompt: string, options?: { signal?: AbortSignal }) {
|
||||
capturedSignal = options?.signal;
|
||||
return { events: asyncEvents([{ type: 'chunk' }]) };
|
||||
},
|
||||
};
|
||||
|
||||
provider.loadCodexSdkModule = async () => ({
|
||||
Codex: class {
|
||||
startThread(options?: Record<string, unknown>) {
|
||||
calls.push({ fn: 'start', options: options ?? {} });
|
||||
return fakeThread;
|
||||
}
|
||||
|
||||
resumeThread(sessionId: string, options?: Record<string, unknown>) {
|
||||
calls.push({ fn: 'resume', sessionId, options: options ?? {} });
|
||||
return fakeThread;
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
const startExec = await provider.createSdkExecution({
|
||||
prompt: 'start codex',
|
||||
sessionId: 'codex-session-1',
|
||||
isResume: false,
|
||||
model: 'gpt-5.4',
|
||||
thinkingMode: 'high',
|
||||
workspacePath: '/tmp/workspace',
|
||||
});
|
||||
assert.equal(calls[0]?.fn, 'start');
|
||||
assert.equal(calls[0]?.options.model, 'gpt-5.4');
|
||||
assert.equal(calls[0]?.options.modelReasoningEffort, 'high');
|
||||
assert.equal(calls[0]?.options.workingDirectory, '/tmp/workspace');
|
||||
|
||||
assert.equal(await startExec.stop(), true);
|
||||
assert.equal(capturedSignal?.aborted, true);
|
||||
|
||||
await provider.createSdkExecution({
|
||||
prompt: 'resume codex',
|
||||
sessionId: 'codex-session-1',
|
||||
isResume: true,
|
||||
workspacePath: '/tmp/workspace',
|
||||
});
|
||||
assert.equal(calls[1]?.fn, 'resume');
|
||||
assert.equal(calls[1]?.sessionId, 'codex-session-1');
|
||||
});
|
||||
|
||||
// This test covers Codex model-list loading from ~/.codex/models_cache.json and model-shape mapping.
|
||||
test('codex provider reads models_cache.json and maps model metadata', async () => {
|
||||
const provider = new CodexProvider();
|
||||
const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), 'codex-models-'));
|
||||
const codexDir = path.join(tempRoot, '.codex');
|
||||
await fs.mkdir(codexDir, { recursive: true });
|
||||
await fs.writeFile(
|
||||
path.join(codexDir, 'models_cache.json'),
|
||||
JSON.stringify({
|
||||
models: [
|
||||
{
|
||||
slug: 'gpt-5.4',
|
||||
display_name: 'GPT-5.4',
|
||||
description: 'Latest frontier agentic coding model.',
|
||||
priority: 1,
|
||||
supported_reasoning_levels: [
|
||||
{ effort: 'low' },
|
||||
{ effort: 'medium' },
|
||||
{ effort: 'high' },
|
||||
],
|
||||
},
|
||||
],
|
||||
}),
|
||||
'utf8',
|
||||
);
|
||||
|
||||
const originalHomeDir = os.homedir;
|
||||
(os as any).homedir = () => tempRoot;
|
||||
|
||||
try {
|
||||
const models = await provider.listModels();
|
||||
assert.equal(models.length, 1);
|
||||
assert.equal(models[0]?.value, 'gpt-5.4');
|
||||
assert.equal(models[0]?.default, true);
|
||||
assert.deepEqual(models[0]?.supportedThinkingModes, ['low', 'medium', 'high']);
|
||||
} finally {
|
||||
(os as any).homedir = originalHomeDir;
|
||||
await fs.rm(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
// This test covers persisted session-level model/thinking preferences flowing into Codex thread options.
|
||||
test('codex provider applies saved model/thinking preferences on subsequent launch', async () => {
|
||||
const provider = new CodexProvider() as any;
|
||||
let threadOptions: Record<string, unknown> | null = null;
|
||||
|
||||
provider.loadCodexSdkModule = async () => ({
|
||||
Codex: class {
|
||||
startThread(options?: Record<string, unknown>) {
|
||||
threadOptions = options ?? null;
|
||||
return {
|
||||
async runStreamed() {
|
||||
return { events: asyncEvents([]) };
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
resumeThread() {
|
||||
return {
|
||||
async runStreamed() {
|
||||
return { events: asyncEvents([]) };
|
||||
},
|
||||
};
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
await provider.setSessionModel('codex-pref-1', 'gpt-5.4');
|
||||
await provider.setSessionThinkingMode('codex-pref-1', 'xhigh');
|
||||
|
||||
await provider.launchSession({
|
||||
prompt: 'use stored preferences',
|
||||
sessionId: 'codex-pref-1',
|
||||
});
|
||||
|
||||
assert.ok(threadOptions);
|
||||
assert.equal((threadOptions as { model?: string }).model, 'gpt-5.4');
|
||||
assert.equal((threadOptions as { modelReasoningEffort?: string }).modelReasoningEffort, 'xhigh');
|
||||
});
|
||||
|
||||
// This test covers Claude thinking-level mapping, runtime permission handlers, and model/event normalization.
|
||||
test('claude provider helper mappings match unifier contract', async () => {
|
||||
const provider = new ClaudeProvider() as any;
|
||||
|
||||
assert.equal(provider.resolveClaudeEffort(undefined), 'high');
|
||||
assert.equal(provider.resolveClaudeEffort('low'), 'low');
|
||||
assert.equal(provider.resolveClaudeEffort('not-real'), 'high');
|
||||
|
||||
const allowHandler = provider.resolvePermissionHandler('allow');
|
||||
const denyHandler = provider.resolvePermissionHandler('deny');
|
||||
const askHandler = provider.resolvePermissionHandler('ask');
|
||||
assert.equal(typeof allowHandler, 'function');
|
||||
assert.equal(typeof denyHandler, 'function');
|
||||
assert.equal(askHandler, undefined);
|
||||
|
||||
const allowResult = await allowHandler?.();
|
||||
const denyResult = await denyHandler?.();
|
||||
assert.deepEqual(allowResult, { behavior: 'allow' });
|
||||
assert.equal(denyResult?.behavior, 'deny');
|
||||
|
||||
const mappedModel = provider.mapModelInfo({
|
||||
value: 'default',
|
||||
displayName: 'Default',
|
||||
description: 'Default Claude model',
|
||||
supportsEffort: true,
|
||||
supportedEffortLevels: ['low', 'medium', 'high', 'max'],
|
||||
});
|
||||
assert.equal(mappedModel.value, 'default');
|
||||
assert.equal(mappedModel.supportsThinkingModes, true);
|
||||
assert.deepEqual(mappedModel.supportedThinkingModes, ['low', 'medium', 'high', 'max']);
|
||||
|
||||
const mappedEvent = provider.mapSdkEvent({ type: 'message', subtype: 'delta' });
|
||||
assert.equal(mappedEvent?.message, 'message:delta');
|
||||
});
|
||||
|
||||
// This test covers service-level capability validation for runtime permissions and thinking mode support.
|
||||
test('llmService rejects unsupported runtime permission and thinking mode combinations', async () => {
|
||||
await assert.rejects(
|
||||
llmService.startSession('cursor', {
|
||||
prompt: 'hello',
|
||||
runtimePermissionMode: 'allow',
|
||||
}),
|
||||
(error: unknown) =>
|
||||
error instanceof AppError &&
|
||||
error.code === 'RUNTIME_PERMISSION_NOT_SUPPORTED' &&
|
||||
error.statusCode === 400,
|
||||
);
|
||||
|
||||
await assert.rejects(
|
||||
llmService.startSession('cursor', {
|
||||
prompt: 'hello',
|
||||
thinkingMode: 'high',
|
||||
}),
|
||||
(error: unknown) =>
|
||||
error instanceof AppError &&
|
||||
error.code === 'THINKING_MODE_NOT_SUPPORTED' &&
|
||||
error.statusCode === 400,
|
||||
);
|
||||
});
|
||||
|
||||
// This test covers model/thinking capability gates on providers before any external process/SDK usage.
|
||||
test('providers enforce capability gates for model/thinking updates', async () => {
|
||||
const claudeProvider = new ClaudeProvider();
|
||||
const cursorProvider = new CursorProvider();
|
||||
|
||||
await assert.rejects(
|
||||
cursorProvider.setSessionThinkingMode('cursor-session', 'high'),
|
||||
(error: unknown) =>
|
||||
error instanceof AppError &&
|
||||
error.code === 'THINKING_MODE_NOT_SUPPORTED' &&
|
||||
error.statusCode === 400,
|
||||
);
|
||||
|
||||
await claudeProvider.setSessionModel('claude-session', 'sonnet');
|
||||
const preference = (claudeProvider as any).getSessionPreference('claude-session');
|
||||
assert.equal(preference.model, 'sonnet');
|
||||
});
|
||||
327
server/src/modules/llm/llm-unifier.sessions.test.ts
Normal file
327
server/src/modules/llm/llm-unifier.sessions.test.ts
Normal file
@@ -0,0 +1,327 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import fs from 'node:fs/promises';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
import test from 'node:test';
|
||||
|
||||
import { AppError } from '../../shared/utils/app-error.js';
|
||||
import { scanStateDb } from '../../shared/database/repositories/scan-state.db.js';
|
||||
import { sessionsDb } from '../../shared/database/repositories/sessions.db.js';
|
||||
import { llmSessionsService } from './sessions.service.js';
|
||||
import { sessionIndexers } from './session-indexers/index.js';
|
||||
import { conversationSearchService } from '../conversations/conversation-search.service.js';
|
||||
import type { ISessionIndexer } from './session-indexers/session-indexer.interface.js';
|
||||
|
||||
const patchMethod = <T extends object, K extends keyof T>(target: T, key: K, replacement: T[K]) => {
|
||||
const original = target[key];
|
||||
(target as any)[key] = replacement;
|
||||
return () => {
|
||||
(target as any)[key] = original;
|
||||
};
|
||||
};
|
||||
|
||||
const patchIndexers = (nextIndexers: ISessionIndexer[]) => {
|
||||
const originalIndexers = [...sessionIndexers];
|
||||
sessionIndexers.splice(0, sessionIndexers.length, ...nextIndexers);
|
||||
return () => {
|
||||
sessionIndexers.splice(0, sessionIndexers.length, ...originalIndexers);
|
||||
};
|
||||
};
|
||||
|
||||
// This test covers multi-provider synchronization orchestration and failure aggregation.
|
||||
test('llmSessionsService.synchronizeSessions aggregates processed counts and failures', { concurrency: false }, async () => {
|
||||
let updateLastScannedAtCalls = 0;
|
||||
const restoreScanDate = patchMethod(scanStateDb, 'getLastScannedAt', () => new Date('2026-04-01T00:00:00.000Z'));
|
||||
const restoreUpdateScanDate = patchMethod(scanStateDb, 'updateLastScannedAt', () => {
|
||||
updateLastScannedAtCalls += 1;
|
||||
});
|
||||
const restoreIndexers = patchIndexers([
|
||||
{
|
||||
provider: 'claude',
|
||||
async synchronize() {
|
||||
return 3;
|
||||
},
|
||||
},
|
||||
{
|
||||
provider: 'codex',
|
||||
async synchronize() {
|
||||
throw new Error('codex index failed');
|
||||
},
|
||||
},
|
||||
]);
|
||||
|
||||
try {
|
||||
const result = await llmSessionsService.synchronizeSessions();
|
||||
assert.equal(result.processedByProvider.claude, 3);
|
||||
assert.equal(result.processedByProvider.codex, 0);
|
||||
assert.equal(result.processedByProvider.cursor, 0);
|
||||
assert.equal(result.processedByProvider.gemini, 0);
|
||||
assert.equal(result.failures.length, 1);
|
||||
assert.equal(result.failures[0], 'codex index failed');
|
||||
assert.equal(updateLastScannedAtCalls, 1);
|
||||
} finally {
|
||||
restoreIndexers();
|
||||
restoreUpdateScanDate();
|
||||
restoreScanDate();
|
||||
}
|
||||
});
|
||||
|
||||
// This test covers provider-specific sync behavior for both incremental and full-rescan modes.
|
||||
test('llmSessionsService.synchronizeProvider honors fullRescan option', { concurrency: false }, async () => {
|
||||
const observedScanDates: Array<Date | null> = [];
|
||||
const restoreScanDate = patchMethod(scanStateDb, 'getLastScannedAt', () => new Date('2026-04-02T00:00:00.000Z'));
|
||||
const restoreUpdateScanDate = patchMethod(scanStateDb, 'updateLastScannedAt', () => {});
|
||||
const restoreIndexers = patchIndexers([
|
||||
{
|
||||
provider: 'cursor',
|
||||
async synchronize(lastScanAt) {
|
||||
observedScanDates.push(lastScanAt);
|
||||
return 7;
|
||||
},
|
||||
},
|
||||
]);
|
||||
|
||||
try {
|
||||
const incremental = await llmSessionsService.synchronizeProvider('cursor');
|
||||
const fullRescan = await llmSessionsService.synchronizeProvider('cursor', { fullRescan: true });
|
||||
|
||||
assert.equal(incremental.provider, 'cursor');
|
||||
assert.equal(incremental.processed, 7);
|
||||
assert.equal(fullRescan.provider, 'cursor');
|
||||
assert.equal(fullRescan.processed, 7);
|
||||
assert.equal(observedScanDates.length, 2);
|
||||
assert.ok(observedScanDates[0] instanceof Date);
|
||||
assert.equal(observedScanDates[1], null);
|
||||
} finally {
|
||||
restoreIndexers();
|
||||
restoreUpdateScanDate();
|
||||
restoreScanDate();
|
||||
}
|
||||
});
|
||||
|
||||
// This test covers session rename persistence and not-found guardrails.
|
||||
test('llmSessionsService.updateSessionCustomName validates existence before updating', { concurrency: false }, () => {
|
||||
let updated: { sessionId: string; customName: string } | null = null;
|
||||
const restoreGetById = patchMethod(sessionsDb, 'getSessionById', (sessionId: string) => (
|
||||
sessionId === 'known-session'
|
||||
? {
|
||||
session_id: 'known-session',
|
||||
provider: 'claude',
|
||||
workspace_path: '/tmp/workspace',
|
||||
jsonl_path: null,
|
||||
created_at: '2026-04-01T00:00:00.000Z',
|
||||
updated_at: '2026-04-01T00:00:00.000Z',
|
||||
}
|
||||
: null
|
||||
));
|
||||
const restoreUpdateName = patchMethod(sessionsDb, 'updateSessionCustomName', (sessionId: string, customName: string) => {
|
||||
updated = { sessionId, customName };
|
||||
});
|
||||
|
||||
try {
|
||||
llmSessionsService.updateSessionCustomName('known-session', 'New Session Name');
|
||||
assert.deepEqual(updated, {
|
||||
sessionId: 'known-session',
|
||||
customName: 'New Session Name',
|
||||
});
|
||||
|
||||
assert.throws(
|
||||
() => llmSessionsService.updateSessionCustomName('missing-session', 'Nope'),
|
||||
(error: unknown) =>
|
||||
error instanceof AppError &&
|
||||
error.code === 'SESSION_NOT_FOUND' &&
|
||||
error.statusCode === 404,
|
||||
);
|
||||
} finally {
|
||||
restoreUpdateName();
|
||||
restoreGetById();
|
||||
}
|
||||
});
|
||||
|
||||
// This test covers delete behavior using only DB jsonl_path, including invalid id validation.
|
||||
test('llmSessionsService.deleteSessionArtifacts validates ids and deletes disk/db artifacts', { concurrency: false }, async () => {
|
||||
const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), 'llm-delete-session-'));
|
||||
const transcriptPath = path.join(tempRoot, 'session.jsonl');
|
||||
await fs.writeFile(transcriptPath, '{"ok":true}\n', 'utf8');
|
||||
|
||||
let deletedSessionId: string | null = null;
|
||||
const restoreGetById = patchMethod(sessionsDb, 'getSessionById', (sessionId: string) => (
|
||||
sessionId === 'session-123'
|
||||
? {
|
||||
session_id: 'session-123',
|
||||
provider: 'cursor',
|
||||
workspace_path: '/tmp/workspace',
|
||||
jsonl_path: transcriptPath,
|
||||
created_at: '2026-04-01T00:00:00.000Z',
|
||||
updated_at: '2026-04-01T00:00:00.000Z',
|
||||
}
|
||||
: null
|
||||
));
|
||||
const restoreDelete = patchMethod(sessionsDb, 'deleteSession', (sessionId: string) => {
|
||||
deletedSessionId = sessionId;
|
||||
});
|
||||
|
||||
try {
|
||||
await assert.rejects(
|
||||
llmSessionsService.deleteSessionArtifacts('../invalid'),
|
||||
(error: unknown) =>
|
||||
error instanceof AppError &&
|
||||
error.code === 'INVALID_SESSION_ID' &&
|
||||
error.statusCode === 400,
|
||||
);
|
||||
|
||||
const deleted = await llmSessionsService.deleteSessionArtifacts('session-123');
|
||||
assert.equal(deleted.sessionId, 'session-123');
|
||||
assert.equal(deleted.deletedFromDatabase, true);
|
||||
assert.equal(deleted.deletedFromDisk, true);
|
||||
assert.equal(deletedSessionId, 'session-123');
|
||||
await assert.rejects(fs.access(transcriptPath));
|
||||
|
||||
const missing = await llmSessionsService.deleteSessionArtifacts('session-404');
|
||||
assert.equal(missing.deletedFromDatabase, false);
|
||||
assert.equal(missing.deletedFromDisk, false);
|
||||
} finally {
|
||||
restoreDelete();
|
||||
restoreGetById();
|
||||
await fs.rm(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
// This test covers session-history parsing for JSONL (including malformed lines) and Gemini JSON files.
|
||||
test('llmSessionsService.getSessionHistory parses JSONL and Gemini JSON correctly', { concurrency: false }, async () => {
|
||||
const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), 'llm-history-'));
|
||||
const jsonlPath = path.join(tempRoot, 'session.jsonl');
|
||||
const jsonPath = path.join(tempRoot, 'gemini.json');
|
||||
await fs.writeFile(jsonlPath, '{"message":"hello"}\nnot-json\n', 'utf8');
|
||||
await fs.writeFile(jsonPath, '{"messages":[{"text":"hi"}]}', 'utf8');
|
||||
|
||||
const restoreGetById = patchMethod(sessionsDb, 'getSessionById', (sessionId: string) => {
|
||||
if (sessionId === 'jsonl-session') {
|
||||
return {
|
||||
session_id: 'jsonl-session',
|
||||
provider: 'cursor',
|
||||
workspace_path: '/tmp/workspace',
|
||||
jsonl_path: jsonlPath,
|
||||
created_at: '2026-04-01T00:00:00.000Z',
|
||||
updated_at: '2026-04-01T00:00:00.000Z',
|
||||
};
|
||||
}
|
||||
|
||||
if (sessionId === 'json-session') {
|
||||
return {
|
||||
session_id: 'json-session',
|
||||
provider: 'gemini',
|
||||
workspace_path: '/tmp/workspace',
|
||||
jsonl_path: jsonPath,
|
||||
created_at: '2026-04-01T00:00:00.000Z',
|
||||
updated_at: '2026-04-01T00:00:00.000Z',
|
||||
};
|
||||
}
|
||||
|
||||
if (sessionId === 'missing-history-path') {
|
||||
return {
|
||||
session_id: 'missing-history-path',
|
||||
provider: 'claude',
|
||||
workspace_path: '/tmp/workspace',
|
||||
jsonl_path: null,
|
||||
created_at: '2026-04-01T00:00:00.000Z',
|
||||
updated_at: '2026-04-01T00:00:00.000Z',
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
});
|
||||
|
||||
try {
|
||||
const jsonlHistory = await llmSessionsService.getSessionHistory('jsonl-session');
|
||||
assert.equal(jsonlHistory.fileType, 'jsonl');
|
||||
assert.equal(Array.isArray(jsonlHistory.entries), true);
|
||||
assert.equal(jsonlHistory.entries.length, 2);
|
||||
assert.deepEqual(jsonlHistory.entries[0], { message: 'hello' });
|
||||
assert.deepEqual(jsonlHistory.entries[1], { raw: 'not-json', parseError: true });
|
||||
|
||||
const geminiHistory = await llmSessionsService.getSessionHistory('json-session');
|
||||
assert.equal(geminiHistory.fileType, 'json');
|
||||
assert.equal(geminiHistory.entries.length, 1);
|
||||
assert.deepEqual(geminiHistory.entries[0], { messages: [{ text: 'hi' }] });
|
||||
|
||||
await assert.rejects(
|
||||
llmSessionsService.getSessionHistory('unknown-session'),
|
||||
(error: unknown) =>
|
||||
error instanceof AppError &&
|
||||
error.code === 'SESSION_NOT_FOUND' &&
|
||||
error.statusCode === 404,
|
||||
);
|
||||
|
||||
await assert.rejects(
|
||||
llmSessionsService.getSessionHistory('missing-history-path'),
|
||||
(error: unknown) =>
|
||||
error instanceof AppError &&
|
||||
error.code === 'SESSION_HISTORY_NOT_AVAILABLE' &&
|
||||
error.statusCode === 404,
|
||||
);
|
||||
} finally {
|
||||
restoreGetById();
|
||||
await fs.rm(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
// This test covers conversation search over indexed transcript files with provider/case filters.
|
||||
test('conversationSearchService searches indexed transcripts with provider and case filters', { concurrency: false }, async () => {
|
||||
const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), 'llm-search-'));
|
||||
const cursorPath = path.join(tempRoot, 'cursor.jsonl');
|
||||
const codexPath = path.join(tempRoot, 'codex.jsonl');
|
||||
await fs.writeFile(cursorPath, 'hello world\nNeedle lower\n', 'utf8');
|
||||
await fs.writeFile(codexPath, 'HELLO WORLD\nNEEDLE UPPER\n', 'utf8');
|
||||
|
||||
const restoreGetAll = patchMethod(sessionsDb, 'getAllSessions', () => ([
|
||||
{
|
||||
session_id: 'cursor-s',
|
||||
provider: 'cursor',
|
||||
workspace_path: '/tmp/workspace-cursor',
|
||||
jsonl_path: cursorPath,
|
||||
custom_name: null,
|
||||
created_at: '2026-04-01T00:00:00.000Z',
|
||||
updated_at: '2026-04-01T00:00:00.000Z',
|
||||
},
|
||||
{
|
||||
session_id: 'codex-s',
|
||||
provider: 'codex',
|
||||
workspace_path: '/tmp/workspace-codex',
|
||||
jsonl_path: codexPath,
|
||||
custom_name: null,
|
||||
created_at: '2026-04-01T00:00:00.000Z',
|
||||
updated_at: '2026-04-01T00:00:00.000Z',
|
||||
},
|
||||
]));
|
||||
|
||||
try {
|
||||
await assert.rejects(
|
||||
conversationSearchService.search({ query: ' ' }),
|
||||
(error: unknown) =>
|
||||
error instanceof AppError &&
|
||||
error.code === 'SEARCH_QUERY_REQUIRED' &&
|
||||
error.statusCode === 400,
|
||||
);
|
||||
|
||||
const anyProviderResults = await conversationSearchService.search({
|
||||
query: 'needle',
|
||||
caseSensitive: false,
|
||||
limit: 20,
|
||||
});
|
||||
assert.ok(anyProviderResults.some((entry) => entry.sessionId === 'cursor-s'));
|
||||
assert.ok(anyProviderResults.some((entry) => entry.sessionId === 'codex-s'));
|
||||
|
||||
const codexOnlyResults = await conversationSearchService.search({
|
||||
query: 'NEEDLE',
|
||||
caseSensitive: true,
|
||||
provider: 'codex',
|
||||
limit: 20,
|
||||
});
|
||||
assert.ok(codexOnlyResults.length >= 1);
|
||||
assert.ok(codexOnlyResults.every((entry) => entry.provider === 'codex'));
|
||||
} finally {
|
||||
restoreGetAll();
|
||||
await fs.rm(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
42
server/src/modules/llm/llm.registry.ts
Normal file
42
server/src/modules/llm/llm.registry.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
import type { IProvider } from '@/modules/llm/providers/provider.interface.js';
|
||||
import { ClaudeProvider } from '@/modules/llm/providers/claude.provider.js';
|
||||
import { CodexProvider } from '@/modules/llm/providers/codex.provider.js';
|
||||
import { CursorProvider } from '@/modules/llm/providers/cursor.provider.js';
|
||||
import { GeminiProvider } from '@/modules/llm/providers/gemini.provider.js';
|
||||
import type { LLMProvider } from '@/shared/types/app.js';
|
||||
import { AppError } from '@/shared/utils/app-error.js';
|
||||
|
||||
const providers: Record<LLMProvider, IProvider> = {
|
||||
claude: new ClaudeProvider(),
|
||||
codex: new CodexProvider(),
|
||||
cursor: new CursorProvider(),
|
||||
gemini: new GeminiProvider(),
|
||||
};
|
||||
|
||||
/**
|
||||
* Central registry for resolving provider implementations by id.
|
||||
*/
|
||||
export const llmProviderRegistry = {
|
||||
/**
|
||||
* Returns all registered providers.
|
||||
*/
|
||||
listProviders(): IProvider[] {
|
||||
return Object.values(providers);
|
||||
},
|
||||
|
||||
/**
|
||||
* Resolves one provider or throws a typed 400 error.
|
||||
*/
|
||||
resolveProvider(provider: string): IProvider {
|
||||
const key = provider as LLMProvider;
|
||||
const resolvedProvider = providers[key];
|
||||
if (!resolvedProvider) {
|
||||
throw new AppError(`Unsupported provider "${provider}".`, {
|
||||
code: 'UNSUPPORTED_PROVIDER',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
return resolvedProvider;
|
||||
},
|
||||
};
|
||||
295
server/src/modules/llm/llm.routes.ts
Normal file
295
server/src/modules/llm/llm.routes.ts
Normal file
@@ -0,0 +1,295 @@
|
||||
import express, { type NextFunction, type Request, type Response } from 'express';
|
||||
|
||||
import { asyncHandler } from '@/shared/http/async-handler.js';
|
||||
import { AppError } from '@/shared/utils/app-error.js';
|
||||
import { createApiErrorResponse, createApiSuccessResponse } from '@/shared/http/api-response.js';
|
||||
import { llmService } from '@/modules/llm/llm.service.js';
|
||||
import { llmSessionsService } from '@/modules/llm/sessions.service.js';
|
||||
import { logger } from '@/shared/utils/logger.js';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
/**
|
||||
* Safely reads an Express path parameter that may arrive as string or string[].
|
||||
*/
|
||||
const readPathParam = (value: unknown, name: string): string => {
|
||||
if (typeof value === 'string') {
|
||||
return value;
|
||||
}
|
||||
|
||||
if (Array.isArray(value) && typeof value[0] === 'string') {
|
||||
return value[0];
|
||||
}
|
||||
|
||||
throw new AppError(`${name} path parameter is invalid.`, {
|
||||
code: 'INVALID_PATH_PARAMETER',
|
||||
statusCode: 400,
|
||||
});
|
||||
};
|
||||
|
||||
const normalizeProviderParam = (value: unknown): string =>
|
||||
readPathParam(value, 'provider').trim().toLowerCase();
|
||||
|
||||
/**
|
||||
* Allows callers to block until a launched/resumed session reaches a final state.
|
||||
*/
|
||||
const parseWaitForCompletion = (req: Request): boolean => {
|
||||
const value = (req.body as Record<string, unknown> | undefined)?.waitForCompletion;
|
||||
return value === true;
|
||||
};
|
||||
|
||||
/**
|
||||
* Validates and normalizes rename payload.
|
||||
*/
|
||||
const parseRenamePayload = (payload: unknown): { summary: string } => {
|
||||
if (!payload || typeof payload !== 'object') {
|
||||
throw new AppError('Request body must be an object.', {
|
||||
code: 'INVALID_REQUEST_BODY',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const body = payload as Record<string, unknown>;
|
||||
const summary = typeof body.summary === 'string' ? body.summary.trim() : '';
|
||||
if (!summary) {
|
||||
throw new AppError('summary is required.', {
|
||||
code: 'SUMMARY_REQUIRED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
if (summary.length > 500) {
|
||||
throw new AppError('summary must not exceed 500 characters.', {
|
||||
code: 'SUMMARY_TOO_LONG',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
return { summary };
|
||||
};
|
||||
|
||||
router.get(
|
||||
'/providers',
|
||||
asyncHandler(async (_req: Request, res: Response) => {
|
||||
res.json(createApiSuccessResponse({ providers: llmService.listProviders() }));
|
||||
}),
|
||||
);
|
||||
|
||||
router.get(
|
||||
'/providers/:provider/models',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const provider = normalizeProviderParam(req.params.provider);
|
||||
const models = await llmService.listModels(provider);
|
||||
res.json(createApiSuccessResponse({ provider, models }));
|
||||
}),
|
||||
);
|
||||
|
||||
router.get(
|
||||
'/providers/:provider/sessions',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const provider = normalizeProviderParam(req.params.provider);
|
||||
const sessions = llmService.listSessions(provider);
|
||||
res.json(createApiSuccessResponse({ provider, sessions }));
|
||||
}),
|
||||
);
|
||||
|
||||
router.get(
|
||||
'/providers/:provider/sessions/:sessionId',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const provider = normalizeProviderParam(req.params.provider);
|
||||
const sessionId = readPathParam(req.params.sessionId, 'sessionId');
|
||||
const session = llmService.getSession(provider, sessionId);
|
||||
if (!session) {
|
||||
throw new AppError(`Session "${sessionId}" not found for provider "${provider}".`, {
|
||||
code: 'SESSION_NOT_FOUND',
|
||||
statusCode: 404,
|
||||
});
|
||||
}
|
||||
|
||||
res.json(createApiSuccessResponse({ provider, session }));
|
||||
}),
|
||||
);
|
||||
|
||||
/**
 * POST /providers/:provider/sessions/start — launches a new session.
 *
 * Default behavior is asynchronous: responds 202 with the initial snapshot.
 * When the request opts into waiting (see parseWaitForCompletion), blocks
 * until the session finishes and returns the final snapshot instead; falls
 * back to the initial snapshot if the session vanished while waiting.
 */
router.post(
  '/providers/:provider/sessions/start',
  asyncHandler(async (req: Request, res: Response) => {
    const provider = normalizeProviderParam(req.params.provider);
    const snapshot = await llmService.startSession(provider, req.body);

    const waitForCompletion = parseWaitForCompletion(req);
    if (!waitForCompletion) {
      res.status(202).json(
        createApiSuccessResponse({
          provider,
          session: snapshot,
        }),
      );
      return;
    }

    const completedSnapshot = await llmService.waitForSession(provider, snapshot.sessionId);
    res.json(createApiSuccessResponse({ provider, session: completedSnapshot ?? snapshot }));
  }),
);

/**
 * POST /providers/:provider/sessions/:sessionId/resume — resumes an existing
 * session with a new prompt. Mirrors the start route's 202/wait behavior.
 */
router.post(
  '/providers/:provider/sessions/:sessionId/resume',
  asyncHandler(async (req: Request, res: Response) => {
    const provider = normalizeProviderParam(req.params.provider);
    const sessionId = readPathParam(req.params.sessionId, 'sessionId');

    const snapshot = await llmService.resumeSession(provider, sessionId, req.body);

    const waitForCompletion = parseWaitForCompletion(req);
    if (!waitForCompletion) {
      res.status(202).json(createApiSuccessResponse({ provider, session: snapshot }));
      return;
    }

    const completedSnapshot = await llmService.waitForSession(provider, sessionId);
    res.json(createApiSuccessResponse({ provider, session: completedSnapshot ?? snapshot }));
  }),
);
|
||||
|
||||
/**
 * POST /providers/:provider/sessions/:sessionId/stop — requests a graceful
 * stop; `stopped` in the response reports whether the provider accepted it.
 */
router.post(
  '/providers/:provider/sessions/:sessionId/stop',
  asyncHandler(async (req: Request, res: Response) => {
    const provider = normalizeProviderParam(req.params.provider);
    const sessionId = readPathParam(req.params.sessionId, 'sessionId');
    const stopped = await llmService.stopSession(provider, sessionId);
    res.json(createApiSuccessResponse({ provider, sessionId, stopped }));
  }),
);

/**
 * PATCH /providers/:provider/sessions/:sessionId/model — switches the model
 * for a session. Rejects missing/blank model values with 400.
 */
router.patch(
  '/providers/:provider/sessions/:sessionId/model',
  asyncHandler(async (req: Request, res: Response) => {
    const provider = normalizeProviderParam(req.params.provider);
    const sessionId = readPathParam(req.params.sessionId, 'sessionId');
    const model = typeof req.body?.model === 'string' ? req.body.model.trim() : '';
    if (!model) {
      throw new AppError('model is required.', {
        code: 'MODEL_REQUIRED',
        statusCode: 400,
      });
    }

    await llmService.setSessionModel(provider, sessionId, model);
    res.json(
      createApiSuccessResponse({
        provider,
        sessionId,
        model,
      }),
    );
  }),
);

/**
 * PATCH /providers/:provider/sessions/:sessionId/thinking — switches the
 * thinking mode for a session. Rejects missing/blank values with 400.
 */
router.patch(
  '/providers/:provider/sessions/:sessionId/thinking',
  asyncHandler(async (req: Request, res: Response) => {
    const provider = normalizeProviderParam(req.params.provider);
    const sessionId = readPathParam(req.params.sessionId, 'sessionId');
    const thinkingMode =
      typeof req.body?.thinkingMode === 'string' ? req.body.thinkingMode.trim() : '';

    if (!thinkingMode) {
      throw new AppError('thinkingMode is required.', {
        code: 'THINKING_MODE_REQUIRED',
        statusCode: 400,
      });
    }

    await llmService.setSessionThinkingMode(provider, sessionId, thinkingMode);
    res.json(
      createApiSuccessResponse({
        provider,
        sessionId,
        thinkingMode,
      }),
    );
  }),
);
|
||||
|
||||
/**
 * GET /sessions/:sessionId/history — returns the persisted message history
 * for one DB-indexed session (provider-agnostic route).
 */
router.get(
  '/sessions/:sessionId/history',
  asyncHandler(async (req: Request, res: Response) => {
    const sessionId = readPathParam(req.params.sessionId, 'sessionId');
    const history = await llmSessionsService.getSessionHistory(sessionId);
    res.json(createApiSuccessResponse(history));
  }),
);

/**
 * Renames one indexed session by writing the custom summary into DB.
 */
router.put(
  '/sessions/:sessionId/rename',
  asyncHandler(async (req: Request, res: Response) => {
    const sessionId = readPathParam(req.params.sessionId, 'sessionId');
    const { summary } = parseRenamePayload(req.body);
    llmSessionsService.updateSessionCustomName(sessionId, summary);
    res.json(createApiSuccessResponse({ sessionId, summary }));
  }),
);

/**
 * Returns DB-indexed sessions discovered by the session-processor scan.
 * The optional `provider` query filter is lower-cased before matching.
 */
router.get(
  '/sessions/index',
  asyncHandler(async (req: Request, res: Response) => {
    const provider =
      typeof req.query.provider === 'string' ? req.query.provider.trim().toLowerCase() : undefined;
    const sessions = llmSessionsService.listIndexedSessions(provider);
    res.json(createApiSuccessResponse({ provider: provider ?? null, sessions }));
  }),
);

/**
 * Triggers provider disk scans and refreshes the shared sessions table.
 */
router.post(
  '/sessions/sync',
  asyncHandler(async (_req: Request, res: Response) => {
    const syncResult = await llmSessionsService.synchronizeSessions();
    res.json(createApiSuccessResponse(syncResult));
  }),
);

/**
 * Deletes provider-specific session artifacts and removes the DB row.
 */
router.delete(
  '/sessions/:sessionId',
  asyncHandler(async (req: Request, res: Response) => {
    const sessionId = readPathParam(req.params.sessionId, 'sessionId');
    const result = await llmSessionsService.deleteSessionArtifacts(sessionId);
    res.json(createApiSuccessResponse(result));
  }),
);
|
||||
|
||||
/**
|
||||
* Normalizes route-level failures to a consistent JSON API shape.
|
||||
*/
|
||||
router.use((error: unknown, _req: Request, res: Response, _next: NextFunction) => {
|
||||
if (res.headersSent) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (error instanceof AppError) {
|
||||
res
|
||||
.status(error.statusCode)
|
||||
.json(createApiErrorResponse(error.code, error.message, undefined, error.details));
|
||||
return;
|
||||
}
|
||||
|
||||
const message = error instanceof Error ? error.message : 'Unexpected LLM route failure.';
|
||||
logger.error(message, {
|
||||
module: 'llm.routes',
|
||||
});
|
||||
|
||||
res.status(500).json(createApiErrorResponse('INTERNAL_ERROR', message));
|
||||
});
|
||||
|
||||
export default router;
|
||||
180
server/src/modules/llm/llm.service.ts
Normal file
180
server/src/modules/llm/llm.service.ts
Normal file
@@ -0,0 +1,180 @@
|
||||
import type { LLMProvider } from '@/shared/types/app.js';
|
||||
import { AppError } from '@/shared/utils/app-error.js';
|
||||
import { llmProviderRegistry } from '@/modules/llm/llm.registry.js';
|
||||
import type {
|
||||
ProviderModel,
|
||||
ProviderSessionSnapshot,
|
||||
RuntimePermissionMode,
|
||||
StartSessionInput,
|
||||
} from '@/modules/llm/providers/provider.interface.js';
|
||||
|
||||
/**
|
||||
* Converts unknown request values into optional trimmed strings.
|
||||
*/
|
||||
const normalizeOptionalString = (value: unknown): string | undefined => {
|
||||
if (typeof value !== 'string') {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const normalized = value.trim();
|
||||
return normalized.length > 0 ? normalized : undefined;
|
||||
};
|
||||
|
||||
/**
|
||||
* Validates and normalizes runtime permission mode.
|
||||
*/
|
||||
const normalizePermissionMode = (value: unknown): RuntimePermissionMode | undefined => {
|
||||
const normalized = normalizeOptionalString(value);
|
||||
if (!normalized) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
if (normalized === 'ask' || normalized === 'allow' || normalized === 'deny') {
|
||||
return normalized;
|
||||
}
|
||||
|
||||
throw new AppError(`Unsupported runtimePermissionMode "${normalized}".`, {
|
||||
code: 'INVALID_RUNTIME_PERMISSION_MODE',
|
||||
statusCode: 400,
|
||||
});
|
||||
};
|
||||
|
||||
/**
 * Facade over provider implementations with payload validation and capability checks.
 * Resolves the provider by name via the registry, then delegates; startSession
 * and resumeSession additionally validate the payload and capability contracts.
 */
export const llmService = {
  /** Lists registered providers with their execution family and capability flags. */
  listProviders(): Array<{
    id: LLMProvider;
    family: 'sdk' | 'cli';
    capabilities: {
      supportsRuntimePermissionRequests: boolean;
      supportsThinkingModeControl: boolean;
      supportsModelSwitching: boolean;
      supportsSessionResume: boolean;
      supportsSessionStop: boolean;
    };
  }> {
    return llmProviderRegistry.listProviders().map((provider) => ({
      id: provider.id,
      family: provider.family,
      capabilities: provider.capabilities,
    }));
  },

  /** Lists models exposed by one provider. */
  async listModels(providerName: string): Promise<ProviderModel[]> {
    const provider = llmProviderRegistry.resolveProvider(providerName);
    return provider.listModels();
  },

  /** Lists the provider's in-memory session snapshots. */
  listSessions(providerName: string): ProviderSessionSnapshot[] {
    const provider = llmProviderRegistry.resolveProvider(providerName);
    return provider.listSessions();
  },

  /** Returns one session snapshot, or null when unknown to the provider. */
  getSession(providerName: string, sessionId: string): ProviderSessionSnapshot | null {
    const provider = llmProviderRegistry.resolveProvider(providerName);
    return provider.getSession(sessionId);
  },

  /**
   * Validates the payload and capability contracts, then launches a session.
   * @throws AppError on invalid payload or unsupported capability use.
   */
  async startSession(providerName: string, payload: unknown): Promise<ProviderSessionSnapshot> {
    const provider = llmProviderRegistry.resolveProvider(providerName);
    const input = parseStartPayload(payload);
    validateCapabilityContracts(provider.capabilities, input);
    return provider.launchSession(input);
  },

  /**
   * Validates the payload and capability contracts, then resumes an existing
   * session; the path sessionId overrides any sessionId in the body.
   */
  async resumeSession(
    providerName: string,
    sessionId: string,
    payload: unknown,
  ): Promise<ProviderSessionSnapshot> {
    const provider = llmProviderRegistry.resolveProvider(providerName);
    const input = parseStartPayload(payload);
    validateCapabilityContracts(provider.capabilities, input);
    return provider.resumeSession({ ...input, sessionId });
  },

  /** Blocks until the session completes; null when the session is unknown. */
  async waitForSession(providerName: string, sessionId: string): Promise<ProviderSessionSnapshot | null> {
    const provider = llmProviderRegistry.resolveProvider(providerName);
    return provider.waitForSession(sessionId);
  },

  /** Requests a graceful stop; returns whether the provider accepted it. */
  async stopSession(providerName: string, sessionId: string): Promise<boolean> {
    const provider = llmProviderRegistry.resolveProvider(providerName);
    return provider.stopSession(sessionId);
  },

  /** Switches the session's model (provider validates capability and value). */
  async setSessionModel(providerName: string, sessionId: string, model: string): Promise<void> {
    const provider = llmProviderRegistry.resolveProvider(providerName);
    await provider.setSessionModel(sessionId, model);
  },

  /** Switches the session's thinking mode (provider validates capability and value). */
  async setSessionThinkingMode(
    providerName: string,
    sessionId: string,
    thinkingMode: string,
  ): Promise<void> {
    const provider = llmProviderRegistry.resolveProvider(providerName);
    await provider.setSessionThinkingMode(sessionId, thinkingMode);
  },
};
|
||||
|
||||
/**
|
||||
* Parses and validates session start/resume request payloads.
|
||||
*/
|
||||
function parseStartPayload(payload: unknown): StartSessionInput {
|
||||
if (!payload || typeof payload !== 'object') {
|
||||
throw new AppError('Request body must be an object.', {
|
||||
code: 'INVALID_REQUEST_BODY',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const body = payload as Record<string, unknown>;
|
||||
const prompt = normalizeOptionalString(body.prompt);
|
||||
if (!prompt) {
|
||||
throw new AppError('prompt is required.', {
|
||||
code: 'PROMPT_REQUIRED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
prompt,
|
||||
workspacePath: normalizeOptionalString(body.workspacePath),
|
||||
sessionId: normalizeOptionalString(body.sessionId),
|
||||
model: normalizeOptionalString(body.model),
|
||||
thinkingMode: normalizeOptionalString(body.thinkingMode),
|
||||
runtimePermissionMode: normalizePermissionMode(body.runtimePermissionMode),
|
||||
allowYolo: body.allowYolo === true,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Enforces capability contracts before provider invocation.
|
||||
*/
|
||||
function validateCapabilityContracts(
|
||||
capabilities: {
|
||||
supportsRuntimePermissionRequests: boolean;
|
||||
supportsThinkingModeControl: boolean;
|
||||
},
|
||||
input: StartSessionInput,
|
||||
): void {
|
||||
if (
|
||||
input.runtimePermissionMode &&
|
||||
input.runtimePermissionMode !== 'ask' &&
|
||||
!capabilities.supportsRuntimePermissionRequests
|
||||
) {
|
||||
throw new AppError('Runtime permission requests are not supported by this provider.', {
|
||||
code: 'RUNTIME_PERMISSION_NOT_SUPPORTED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
if (input.thinkingMode && !capabilities.supportsThinkingModeControl) {
|
||||
throw new AppError('Thinking mode is not supported by this provider.', {
|
||||
code: 'THINKING_MODE_NOT_SUPPORTED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
}
|
||||
267
server/src/modules/llm/providers/abstract.provider.ts
Normal file
267
server/src/modules/llm/providers/abstract.provider.ts
Normal file
@@ -0,0 +1,267 @@
|
||||
import { AppError } from '@/shared/utils/app-error.js';
|
||||
import type {
|
||||
IProvider,
|
||||
MutableProviderSession,
|
||||
ProviderCapabilities,
|
||||
ProviderExecutionFamily,
|
||||
ProviderModel,
|
||||
ProviderSessionEvent,
|
||||
ProviderSessionSnapshot,
|
||||
StartSessionInput,
|
||||
} from '@/modules/llm/providers/provider.interface.js';
|
||||
import type { LLMProvider } from '@/shared/types/app.js';
|
||||
|
||||
/** Per-session user choices remembered across start/resume cycles. */
type SessionPreference = {
  model?: string;
  thinkingMode?: string;
};

// Ring-buffer cap on buffered events per session; oldest entries are dropped.
const MAX_EVENT_BUFFER_SIZE = 2_000;

/**
 * Shared provider base for session lifecycle state and capability gating.
 *
 * All state is in-memory: `sessions` holds mutable lifecycle records keyed by
 * sessionId; `sessionPreferences` outlives individual session records so a
 * later resume can reuse the last model/thinking-mode choice.
 */
export abstract class AbstractProvider implements IProvider {
  readonly id: LLMProvider;
  readonly family: ProviderExecutionFamily;
  readonly capabilities: ProviderCapabilities;

  // Live session records keyed by sessionId.
  protected readonly sessions = new Map<string, MutableProviderSession>();
  // Remembered per-session preferences keyed by sessionId.
  protected readonly sessionPreferences = new Map<string, SessionPreference>();

  protected constructor(
    id: LLMProvider,
    family: ProviderExecutionFamily,
    capabilities: ProviderCapabilities,
  ) {
    this.id = id;
    this.family = family;
    this.capabilities = capabilities;
  }

  abstract listModels(): Promise<ProviderModel[]>;
  abstract launchSession(input: StartSessionInput): Promise<ProviderSessionSnapshot>;
  abstract resumeSession(
    input: StartSessionInput & { sessionId: string },
  ): Promise<ProviderSessionSnapshot>;

  /**
   * Returns one in-memory session snapshot when present, null when unknown.
   */
  getSession(sessionId: string): ProviderSessionSnapshot | null {
    const session = this.sessions.get(sessionId);
    if (!session) {
      return null;
    }

    return this.toSnapshot(session);
  }

  /**
   * Returns snapshots of all in-memory sessions.
   */
  listSessions(): ProviderSessionSnapshot[] {
    return [...this.sessions.values()].map((session) => this.toSnapshot(session));
  }

  /**
   * Waits for a running session to complete and returns the final snapshot,
   * or null when the sessionId is unknown.
   */
  async waitForSession(sessionId: string): Promise<ProviderSessionSnapshot | null> {
    const session = this.sessions.get(sessionId);
    if (!session) {
      return null;
    }

    await session.completion;
    return this.toSnapshot(session);
  }

  /**
   * Requests a graceful session stop. Returns false when the session is
   * unknown or the stop hook reports failure. On success the record is marked
   * 'stopped' immediately — the underlying work may still be winding down.
   */
  async stopSession(sessionId: string): Promise<boolean> {
    const session = this.sessions.get(sessionId);
    if (!session) {
      return false;
    }

    const stopped = await session.stop();
    // Only transition from 'running'; a session that already reached a
    // terminal state keeps that state.
    if (stopped && session.status === 'running') {
      this.updateSessionStatus(session, 'stopped');
      this.appendEvent(session, {
        timestamp: new Date().toISOString(),
        channel: 'system',
        message: 'Session stop requested.',
      });
    }

    return stopped;
  }

  /**
   * Validates/supports model switching and updates both live and persisted state.
   * Order: live hook first (may reject), then preference map, then the
   * in-memory record plus an audit event.
   * @throws AppError when unsupported by this provider or the model is blank.
   */
  async setSessionModel(sessionId: string, model: string): Promise<void> {
    if (!this.capabilities.supportsModelSwitching) {
      throw new AppError(`Provider "${this.id}" does not support model switching.`, {
        code: 'MODEL_SWITCH_NOT_SUPPORTED',
        statusCode: 400,
      });
    }

    const trimmedModel = model.trim();
    if (!trimmedModel) {
      throw new AppError('Model cannot be empty.', {
        code: 'INVALID_MODEL',
        statusCode: 400,
      });
    }

    const session = this.sessions.get(sessionId);
    if (session?.setModel) {
      await session.setModel(trimmedModel);
    }

    // Preference survives even when no live session exists yet.
    const currentPreference = this.sessionPreferences.get(sessionId) ?? {};
    this.sessionPreferences.set(sessionId, { ...currentPreference, model: trimmedModel });

    if (session) {
      session.model = trimmedModel;
      this.appendEvent(session, {
        timestamp: new Date().toISOString(),
        channel: 'system',
        message: `Model updated to "${trimmedModel}".`,
      });
    }
  }

  /**
   * Validates/supports thinking mode updates and applies them to live/persisted
   * state; same ordering as setSessionModel.
   * @throws AppError when unsupported by this provider or the mode is blank.
   */
  async setSessionThinkingMode(sessionId: string, thinkingMode: string): Promise<void> {
    if (!this.capabilities.supportsThinkingModeControl) {
      throw new AppError(`Provider "${this.id}" does not support thinking mode control.`, {
        code: 'THINKING_MODE_NOT_SUPPORTED',
        statusCode: 400,
      });
    }

    const trimmedMode = thinkingMode.trim();
    if (!trimmedMode) {
      throw new AppError('Thinking mode cannot be empty.', {
        code: 'INVALID_THINKING_MODE',
        statusCode: 400,
      });
    }

    const session = this.sessions.get(sessionId);
    if (session?.setThinkingMode) {
      await session.setThinkingMode(trimmedMode);
    }

    const currentPreference = this.sessionPreferences.get(sessionId) ?? {};
    this.sessionPreferences.set(sessionId, { ...currentPreference, thinkingMode: trimmedMode });

    if (session) {
      session.thinkingMode = trimmedMode;
      this.appendEvent(session, {
        timestamp: new Date().toISOString(),
        channel: 'system',
        message: `Thinking mode updated to "${trimmedMode}".`,
      });
    }
  }

  /**
   * Reads saved preferences for resumed sessions (empty object when none).
   */
  protected getSessionPreference(sessionId: string): SessionPreference {
    return this.sessionPreferences.get(sessionId) ?? {};
  }

  /**
   * Stores session preferences for subsequent resume/start operations,
   * merging over whatever was previously remembered.
   */
  protected rememberSessionPreference(sessionId: string, preference: SessionPreference): void {
    const currentPreference = this.sessionPreferences.get(sessionId) ?? {};
    this.sessionPreferences.set(sessionId, {
      ...currentPreference,
      ...preference,
    });
  }

  /**
   * Creates mutable internal session state and registers it in memory.
   * The record starts 'running' with a resolved completion and a no-op stop;
   * callers are expected to overwrite `completion`/`stop` with real hooks.
   */
  protected createSessionRecord(
    sessionId: string,
    input: {
      model?: string;
      thinkingMode?: string;
    },
  ): MutableProviderSession {
    const session: MutableProviderSession = {
      sessionId,
      provider: this.id,
      family: this.family,
      status: 'running',
      startedAt: new Date().toISOString(),
      model: input.model,
      thinkingMode: input.thinkingMode,
      events: [],
      completion: Promise.resolve(),
      stop: async () => false,
    };

    this.sessions.set(sessionId, session);
    this.rememberSessionPreference(sessionId, {
      model: input.model,
      thinkingMode: input.thinkingMode,
    });

    return session;
  }

  /**
   * Appends an event while enforcing the configured ring-buffer size
   * (oldest events beyond MAX_EVENT_BUFFER_SIZE are discarded).
   */
  protected appendEvent(session: MutableProviderSession, event: ProviderSessionEvent): void {
    session.events.push(event);

    if (session.events.length > MAX_EVENT_BUFFER_SIZE) {
      session.events.splice(0, session.events.length - MAX_EVENT_BUFFER_SIZE);
    }
  }

  /**
   * Marks the terminal state for a session. Stamps endedAt on every call and
   * overwrites `error` unconditionally (clearing it when called without one).
   */
  protected updateSessionStatus(
    session: MutableProviderSession,
    status: MutableProviderSession['status'],
    error?: string,
  ): void {
    session.status = status;
    session.endedAt = new Date().toISOString();
    session.error = error;
  }

  /**
   * Converts mutable internal session state to an external snapshot; the
   * events array is copied so callers cannot mutate the live buffer.
   */
  protected toSnapshot(session: MutableProviderSession): ProviderSessionSnapshot {
    return {
      sessionId: session.sessionId,
      provider: session.provider,
      family: session.family,
      status: session.status,
      startedAt: session.startedAt,
      endedAt: session.endedAt,
      model: session.model,
      thinkingMode: session.thinkingMode,
      events: [...session.events],
      error: session.error,
    };
  }
}
|
||||
284
server/src/modules/llm/providers/base-cli.provider.ts
Normal file
284
server/src/modules/llm/providers/base-cli.provider.ts
Normal file
@@ -0,0 +1,284 @@
|
||||
import { randomUUID } from 'node:crypto';
|
||||
import { once } from 'node:events';
|
||||
import type { ChildProcessWithoutNullStreams } from 'node:child_process';
|
||||
|
||||
import spawn from 'cross-spawn';
|
||||
|
||||
import { AbstractProvider } from '@/modules/llm/providers/abstract.provider.js';
|
||||
import type {
|
||||
MutableProviderSession,
|
||||
ProviderCapabilities,
|
||||
ProviderSessionEvent,
|
||||
ProviderSessionSnapshot,
|
||||
StartSessionInput,
|
||||
} from '@/modules/llm/providers/provider.interface.js';
|
||||
import { createStreamLineAccumulator } from '@/shared/platform/stream.js';
|
||||
import type { LLMProvider } from '@/shared/types/app.js';
|
||||
|
||||
/** Input passed to subclasses when building one CLI invocation (start or resume). */
type CreateCliInvocationInput = StartSessionInput & {
  sessionId: string;
  isResume: boolean;
};

/** Concrete command line produced by a provider subclass. */
type CliInvocation = {
  command: string;
  args: string[];
  cwd?: string;
  env?: Record<string, string | undefined>;
};

// Time allowed for SIGTERM to take effect before escalating to SIGKILL.
const PROCESS_SHUTDOWN_GRACE_PERIOD_MS = 2_000;

/**
 * Base class for CLI-driven providers with streamed stdout/stderr parsing.
 * Subclasses only describe the command line (createCliInvocation); process
 * spawning, line buffering, event mapping, and termination live here.
 */
export abstract class BaseCliProvider extends AbstractProvider {
  protected constructor(providerId: LLMProvider, capabilities: ProviderCapabilities) {
    super(providerId, 'cli', capabilities);
  }

  /**
   * Starts a new CLI session and begins process output streaming.
   * Generates a fresh sessionId when the caller did not supply one.
   */
  async launchSession(input: StartSessionInput): Promise<ProviderSessionSnapshot> {
    return this.startSessionInternal({
      ...input,
      sessionId: input.sessionId ?? randomUUID(),
      isResume: false,
    });
  }

  /**
   * Resumes an existing CLI session and begins process output streaming.
   */
  async resumeSession(input: StartSessionInput & { sessionId: string }): Promise<ProviderSessionSnapshot> {
    return this.startSessionInternal({
      ...input,
      isResume: true,
    });
  }

  /**
   * Implemented by concrete CLI providers to describe command invocation.
   */
  protected abstract createCliInvocation(input: CreateCliInvocationInput): CliInvocation;

  /**
   * Maps one stdout/stderr line into either JSON or plain-text event shapes.
   * Lines that parse as JSON become channel 'json' regardless of origin.
   */
  protected mapCliOutputLine(line: string, channel: 'stdout' | 'stderr'): ProviderSessionEvent {
    const parsedJson = this.tryParseJson(line);
    if (parsedJson !== null) {
      return {
        timestamp: new Date().toISOString(),
        channel: 'json',
        data: parsedJson,
      };
    }

    return {
      timestamp: new Date().toISOString(),
      channel,
      message: line,
    };
  }

  /**
   * Runs a one-off CLI command and returns full stdout text on success.
   * Rejects with stderr text (or a generic message) on non-zero exit; a null
   * exitCode (signal termination) is treated as failure via `?? 1`.
   *
   * NOTE(review): if the child emits 'error' after 'close' wins the race, the
   * losing once(child, 'error') promise rejects unhandled — consider attaching
   * a no-op catch; verify against Node's unhandled-rejection policy.
   */
  protected async runCommandForOutput(command: string, args: string[]): Promise<string> {
    const child = spawn(command, args, {
      stdio: ['ignore', 'pipe', 'pipe'],
      cwd: process.cwd(),
      env: process.env,
    });

    let stdout = '';
    let stderr = '';

    child.stdout?.on('data', (chunk) => {
      stdout += chunk.toString();
    });
    child.stderr?.on('data', (chunk) => {
      stderr += chunk.toString();
    });

    const closePromise = once(child, 'close');
    const errorPromise = once(child, 'error').then(([error]) => {
      throw error;
    });

    await Promise.race([closePromise, errorPromise]);

    if ((child.exitCode ?? 1) !== 0) {
      const message = stderr.trim() || `Command "${command}" failed with code ${child.exitCode}`;
      throw new Error(message);
    }

    return stdout;
  }

  /**
   * Boots one CLI child process and wires stream handlers to the session buffer.
   * Explicit model/thinking-mode inputs win over remembered preferences; the
   * invocation env is layered over process.env; cwd precedence is
   * invocation.cwd, then workspacePath, then process.cwd().
   */
  private async startSessionInternal(input: CreateCliInvocationInput): Promise<ProviderSessionSnapshot> {
    const preferred = this.getSessionPreference(input.sessionId);
    const effectiveModel = input.model ?? preferred.model;
    const effectiveThinking = input.thinkingMode ?? preferred.thinkingMode;

    const session = this.createSessionRecord(input.sessionId, {
      model: effectiveModel,
      thinkingMode: effectiveThinking,
    });

    const invocation = this.createCliInvocation({
      ...input,
      model: effectiveModel,
      thinkingMode: effectiveThinking,
    });

    const child = spawn(invocation.command, invocation.args, {
      cwd: invocation.cwd ?? input.workspacePath ?? process.cwd(),
      env: {
        ...process.env,
        ...invocation.env,
      },
      stdio: ['ignore', 'pipe', 'pipe'],
    }) as ChildProcessWithoutNullStreams;

    // Replace the record's no-op stop with a real process terminator.
    const stop = async (): Promise<boolean> => this.terminateChildProcess(child);
    session.stop = stop;

    const stdoutAccumulator = createStreamLineAccumulator({ preserveEmptyLines: false });
    const stderrAccumulator = createStreamLineAccumulator({ preserveEmptyLines: false });

    child.stdout.on('data', (chunk) => {
      const lines = stdoutAccumulator.push(chunk);
      for (const line of lines) {
        const event = this.mapCliOutputLine(line, 'stdout');
        this.appendEvent(session, event);
      }
    });

    child.stderr.on('data', (chunk) => {
      const lines = stderrAccumulator.push(chunk);
      for (const line of lines) {
        const event = this.mapCliOutputLine(line, 'stderr');
        this.appendEvent(session, event);
      }
    });

    // Completion resolves when the process closes; snapshot is returned
    // immediately (the session runs in the background).
    session.completion = this.waitForCliProcess(
      session,
      child,
      stdoutAccumulator,
      stderrAccumulator,
    );
    return this.toSnapshot(session);
  }

  /**
   * Waits for process completion/error and marks final session status.
   * Flushes any partial trailing lines from both accumulators first, then:
   * spawn error → 'failed'; already 'stopped' → keep status and log; exit 0 →
   * 'completed'; anything else → 'failed' with exit code/signal detail.
   */
  private async waitForCliProcess(
    session: MutableProviderSession,
    child: ChildProcessWithoutNullStreams,
    stdoutAccumulator: { flush: () => string[] },
    stderrAccumulator: { flush: () => string[] },
  ): Promise<void> {
    const closePromise = once(child, 'close') as Promise<[number | null, NodeJS.Signals | null]>;
    const errorPromise = once(child, 'error') as Promise<[Error]>;
    const raceResult = await Promise.race([
      closePromise.then((result) => ({ type: 'close' as const, result })),
      errorPromise.then((result) => ({ type: 'error' as const, result })),
    ]);

    const pendingStdout = stdoutAccumulator.flush();
    const pendingStderr = stderrAccumulator.flush();

    for (const line of pendingStdout) {
      this.appendEvent(session, this.mapCliOutputLine(line, 'stdout'));
    }

    for (const line of pendingStderr) {
      this.appendEvent(session, this.mapCliOutputLine(line, 'stderr'));
    }

    if (raceResult.type === 'error') {
      const [error] = raceResult.result;
      const message = error.message || 'CLI process failed before start.';
      this.updateSessionStatus(session, 'failed', message);
      this.appendEvent(session, {
        timestamp: new Date().toISOString(),
        channel: 'error',
        message,
      });
      return;
    }

    const [code, signal] = raceResult.result;

    // stopSession already marked the record; just note how it ended.
    if (session.status === 'stopped') {
      this.appendEvent(session, {
        timestamp: new Date().toISOString(),
        channel: 'system',
        message: `Session stopped (${signal ?? 'SIGTERM'}).`,
      });
      return;
    }

    if (code === 0) {
      this.updateSessionStatus(session, 'completed');
      return;
    }

    const message = `CLI command exited with code ${code ?? 'null'}${signal ? ` (signal: ${signal})` : ''}`;
    this.updateSessionStatus(session, 'failed', message);
    this.appendEvent(session, {
      timestamp: new Date().toISOString(),
      channel: 'error',
      message,
    });
  }

  /**
   * Attempts graceful termination first, then force-kills when necessary.
   * Returns true when the process was already gone or a kill was issued;
   * false only when kill() itself threw.
   *
   * NOTE(review): the grace-period setTimeout is never cleared when 'close'
   * wins the race — harmless for a short timer, but unref/clearTimeout would
   * be tidier; verify event-loop impact matters here.
   */
  private async terminateChildProcess(child: ChildProcessWithoutNullStreams): Promise<boolean> {
    if (child.killed || child.exitCode !== null) {
      return true;
    }

    try {
      child.kill('SIGTERM');
      await Promise.race([
        once(child, 'close'),
        new Promise((resolve) => setTimeout(resolve, PROCESS_SHUTDOWN_GRACE_PERIOD_MS)),
      ]);

      if (child.exitCode === null) {
        child.kill('SIGKILL');
      }

      return true;
    } catch {
      return false;
    }
  }

  /**
   * Best-effort JSON parser for stream-json providers. Only attempts a parse
   * when the trimmed line starts with '{' or '['; returns null otherwise or
   * on parse failure.
   */
  private tryParseJson(line: string): unknown | null {
    const trimmed = line.trim();
    if (!trimmed || (!trimmed.startsWith('{') && !trimmed.startsWith('['))) {
      return null;
    }

    try {
      return JSON.parse(trimmed);
    } catch {
      return null;
    }
  }
}
|
||||
147
server/src/modules/llm/providers/base-sdk.provider.ts
Normal file
147
server/src/modules/llm/providers/base-sdk.provider.ts
Normal file
@@ -0,0 +1,147 @@
|
||||
import { randomUUID } from 'node:crypto';
|
||||
|
||||
import { AbstractProvider } from '@/modules/llm/providers/abstract.provider.js';
|
||||
import type {
|
||||
MutableProviderSession,
|
||||
ProviderCapabilities,
|
||||
ProviderSessionEvent,
|
||||
ProviderSessionSnapshot,
|
||||
StartSessionInput,
|
||||
} from '@/modules/llm/providers/provider.interface.js';
|
||||
import type { LLMProvider } from '@/shared/types/app.js';
|
||||
|
||||
/** Input passed to subclasses when creating one SDK execution (start or resume). */
type CreateSdkExecutionInput = StartSessionInput & {
  sessionId: string;
  isResume: boolean;
};

/** A running SDK execution: an event stream plus control hooks. */
type SdkExecution = {
  stream: AsyncIterable<unknown>;
  stop: () => Promise<boolean>;
  // Optional live-update hooks; absence means the SDK cannot change these mid-run.
  setModel?: (model: string) => Promise<void>;
  setThinkingMode?: (thinkingMode: string) => Promise<void>;
};
|
||||
|
||||
/**
 * Base class for SDK-driven providers with async stream consumption.
 *
 * Concrete subclasses implement createSdkExecution to start the vendor SDK;
 * this class owns the shared session lifecycle: record creation, event
 * normalization, status transitions, and background draining of the stream.
 */
export abstract class BaseSdkProvider extends AbstractProvider {
  protected constructor(providerId: LLMProvider, capabilities: ProviderCapabilities) {
    // 'sdk' tags the execution family so callers can tell SDK and CLI apart.
    super(providerId, 'sdk', capabilities);
  }

  /**
   * Starts a new SDK session and begins event streaming.
   *
   * Generates a fresh session id when the caller did not supply one. Returns
   * a snapshot immediately; events keep accumulating in the background.
   */
  async launchSession(input: StartSessionInput): Promise<ProviderSessionSnapshot> {
    return this.startSessionInternal({
      ...input,
      sessionId: input.sessionId ?? randomUUID(),
      isResume: false,
    });
  }

  /**
   * Resumes an existing SDK session and begins event streaming.
   */
  async resumeSession(input: StartSessionInput & { sessionId: string }): Promise<ProviderSessionSnapshot> {
    return this.startSessionInternal({
      ...input,
      isResume: true,
    });
  }

  /**
   * Implemented by concrete SDK providers to create a running execution.
   */
  protected abstract createSdkExecution(input: CreateSdkExecutionInput): Promise<SdkExecution>;

  /**
   * Normalizes raw SDK events to the shared event shape.
   *
   * Default implementation wraps the raw event verbatim on the 'sdk' channel;
   * subclasses may override to add a message or drop events (return null).
   */
  protected mapSdkEvent(rawEvent: unknown): ProviderSessionEvent | null {
    return {
      timestamp: new Date().toISOString(),
      channel: 'sdk',
      data: rawEvent,
    };
  }

  /**
   * Initializes one SDK execution and wires it to the internal session record.
   *
   * On SDK startup failure the session is marked failed, an error event is
   * recorded, and the original error is rethrown to the caller.
   */
  private async startSessionInternal(input: CreateSdkExecutionInput): Promise<ProviderSessionSnapshot> {
    // Fall back to any stored per-session preference when the caller omits
    // model/thinking mode.
    const preferred = this.getSessionPreference(input.sessionId);
    const effectiveModel = input.model ?? preferred.model;
    const effectiveThinking = input.thinkingMode ?? preferred.thinkingMode;

    const session = this.createSessionRecord(input.sessionId, {
      model: effectiveModel,
      thinkingMode: effectiveThinking,
    });

    let execution: SdkExecution;
    try {
      execution = await this.createSdkExecution({
        ...input,
        model: effectiveModel,
        thinkingMode: effectiveThinking,
      });
    } catch (error) {
      const message = error instanceof Error ? error.message : 'Failed to start SDK session';
      this.updateSessionStatus(session, 'failed', message);
      this.appendEvent(session, {
        timestamp: new Date().toISOString(),
        channel: 'error',
        message,
      });
      throw error;
    }

    // Expose the execution's control hooks on the session record; setModel /
    // setThinkingMode stay undefined when the SDK cannot change them mid-run.
    session.stop = execution.stop;
    session.setModel = execution.setModel;
    session.setThinkingMode = execution.setThinkingMode;

    // Drain the stream in the background (deliberately not awaited);
    // consumeStream handles its own errors and never rejects.
    session.completion = this.consumeStream(session, execution.stream);
    return this.toSnapshot(session);
  }

  /**
   * Drains SDK events until completion/error and updates final status.
   */
  private async consumeStream(
    session: MutableProviderSession,
    stream: AsyncIterable<unknown>,
  ): Promise<void> {
    try {
      for await (const sdkEvent of stream) {
        const normalized = this.mapSdkEvent(sdkEvent);
        if (normalized) {
          this.appendEvent(session, normalized);
        }
      }

      // Only promote to 'completed' when nothing else (a stop or failure)
      // already changed the status while we were draining.
      if (session.status === 'running') {
        this.updateSessionStatus(session, 'completed');
      }
    } catch (error) {
      const message = error instanceof Error ? error.message : 'Unknown SDK execution failure';

      // A stream abort caused by an explicit stop is expected: record a
      // system note instead of marking the session failed.
      if (session.status === 'stopped') {
        this.appendEvent(session, {
          timestamp: new Date().toISOString(),
          channel: 'system',
          message: 'Session stopped.',
        });
        return;
      }

      this.updateSessionStatus(session, 'failed', message);
      this.appendEvent(session, {
        timestamp: new Date().toISOString(),
        channel: 'error',
        message,
      });
    }
  }
}
|
||||
182
server/src/modules/llm/providers/claude.provider.ts
Normal file
182
server/src/modules/llm/providers/claude.provider.ts
Normal file
@@ -0,0 +1,182 @@
|
||||
import {
|
||||
query,
|
||||
type CanUseTool,
|
||||
type ModelInfo,
|
||||
type Options,
|
||||
} from '@anthropic-ai/claude-agent-sdk';
|
||||
|
||||
import { BaseSdkProvider } from '@/modules/llm/providers/base-sdk.provider.js';
|
||||
import type {
|
||||
ProviderModel,
|
||||
ProviderSessionEvent,
|
||||
RuntimePermissionMode,
|
||||
StartSessionInput,
|
||||
} from '@/modules/llm/providers/provider.interface.js';
|
||||
|
||||
/**
 * Launch payload for Claude executions: StartSessionInput with the session id
 * resolved and a flag distinguishing resume from a fresh start.
 */
type ClaudeExecutionInput = StartSessionInput & {
  sessionId: string;
  isResume: boolean;
};

// Effort levels accepted by resolveClaudeEffort; anything else falls back to 'high'.
const CLAUDE_THINKING_LEVELS = new Set(['low', 'medium', 'high', 'max']);
|
||||
|
||||
/**
|
||||
* Claude SDK provider implementation.
|
||||
*/
|
||||
export class ClaudeProvider extends BaseSdkProvider {
|
||||
constructor() {
|
||||
super('claude', {
|
||||
supportsRuntimePermissionRequests: true,
|
||||
supportsThinkingModeControl: true,
|
||||
supportsModelSwitching: true,
|
||||
supportsSessionResume: true,
|
||||
supportsSessionStop: true,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves available Claude models from the SDK.
|
||||
*/
|
||||
async listModels(): Promise<ProviderModel[]> {
|
||||
const probe = query({
|
||||
prompt: 'model_probe',
|
||||
options: {
|
||||
permissionMode: 'plan',
|
||||
},
|
||||
});
|
||||
|
||||
try {
|
||||
const models = await probe.supportedModels();
|
||||
return models.map((model) => this.mapModelInfo(model));
|
||||
} finally {
|
||||
probe.close();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a Claude SDK query execution for start/resume flows.
|
||||
*/
|
||||
protected async createSdkExecution(input: ClaudeExecutionInput): Promise<{
|
||||
stream: AsyncIterable<unknown>;
|
||||
stop: () => Promise<boolean>;
|
||||
setModel: (model: string) => Promise<void>;
|
||||
}> {
|
||||
const options: Options = {
|
||||
cwd: input.workspacePath,
|
||||
model: input.model,
|
||||
effort: this.resolveClaudeEffort(input.thinkingMode),
|
||||
canUseTool: this.resolvePermissionHandler(input.runtimePermissionMode),
|
||||
};
|
||||
|
||||
if (input.isResume) {
|
||||
options.resume = input.sessionId;
|
||||
} else {
|
||||
options.sessionId = input.sessionId;
|
||||
}
|
||||
|
||||
const queryInstance = query({
|
||||
prompt: input.prompt,
|
||||
options,
|
||||
});
|
||||
|
||||
return {
|
||||
stream: queryInstance,
|
||||
stop: async () => {
|
||||
await queryInstance.interrupt();
|
||||
return true;
|
||||
},
|
||||
setModel: async (model: string) => {
|
||||
await queryInstance.setModel(model);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Produces compact event metadata for frontend stream rendering.
|
||||
*/
|
||||
protected mapSdkEvent(rawEvent: unknown): ProviderSessionEvent | null {
|
||||
if (typeof rawEvent !== 'object' || rawEvent === null) {
|
||||
return {
|
||||
timestamp: new Date().toISOString(),
|
||||
channel: 'sdk',
|
||||
message: String(rawEvent),
|
||||
};
|
||||
}
|
||||
|
||||
const messageType = this.getStringProperty(rawEvent, 'type');
|
||||
const messageSubtype = this.getStringProperty(rawEvent, 'subtype');
|
||||
const message = [messageType, messageSubtype].filter(Boolean).join(':') || 'claude_event';
|
||||
|
||||
return {
|
||||
timestamp: new Date().toISOString(),
|
||||
channel: 'sdk',
|
||||
message,
|
||||
data: rawEvent,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalizes Claude model metadata to the shared model shape.
|
||||
*/
|
||||
private mapModelInfo(model: ModelInfo): ProviderModel {
|
||||
return {
|
||||
value: model.value,
|
||||
displayName: model.displayName,
|
||||
description: model.description,
|
||||
supportsThinkingModes: Boolean(model.supportsEffort),
|
||||
supportedThinkingModes: model.supportedEffortLevels,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Maps requested thinking mode to Claude effort levels.
|
||||
*/
|
||||
private resolveClaudeEffort(thinkingMode?: string): Options['effort'] {
|
||||
if (!thinkingMode) {
|
||||
return 'high';
|
||||
}
|
||||
|
||||
const normalized = thinkingMode.trim().toLowerCase();
|
||||
if (CLAUDE_THINKING_LEVELS.has(normalized)) {
|
||||
return normalized as Options['effort'];
|
||||
}
|
||||
|
||||
return 'high';
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds a runtime permission callback when explicit allow/deny is requested.
|
||||
*/
|
||||
private resolvePermissionHandler(mode?: RuntimePermissionMode): CanUseTool | undefined {
|
||||
if (!mode || mode === 'ask') {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
if (mode === 'allow') {
|
||||
return async () => ({ behavior: 'allow' });
|
||||
}
|
||||
|
||||
return async () => ({
|
||||
behavior: 'deny',
|
||||
message: 'Permission denied by runtime permission mode.',
|
||||
interrupt: false,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads one optional string property from an unknown event object.
|
||||
*/
|
||||
private getStringProperty(value: unknown, key: string): string | undefined {
|
||||
if (!value || typeof value !== 'object') {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const record = value as Record<string, unknown>;
|
||||
const rawValue = record[key];
|
||||
if (typeof rawValue !== 'string') {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return rawValue;
|
||||
}
|
||||
}
|
||||
171
server/src/modules/llm/providers/codex.provider.ts
Normal file
171
server/src/modules/llm/providers/codex.provider.ts
Normal file
@@ -0,0 +1,171 @@
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
import { readFile } from 'node:fs/promises';
|
||||
|
||||
import { BaseSdkProvider } from '@/modules/llm/providers/base-sdk.provider.js';
|
||||
import type { ProviderModel, ProviderSessionEvent, StartSessionInput } from '@/modules/llm/providers/provider.interface.js';
|
||||
import { AppError } from '@/shared/utils/app-error.js';
|
||||
|
||||
/**
 * Launch payload for Codex executions: StartSessionInput with the session id
 * resolved and a flag distinguishing resume from a fresh start.
 */
type CodexExecutionInput = StartSessionInput & {
  sessionId: string;
  isResume: boolean;
};

/**
 * One entry of ~/.codex/models_cache.json. Every field is optional because
 * the cache is produced externally and its schema is not controlled here.
 */
type CodexModelCacheEntry = {
  slug?: string;
  display_name?: string;
  description?: string;
  supported_reasoning_levels?: Array<{
    effort?: string;
    description?: string;
  }>;
  // priority === 1 is treated as the default model (see listModels).
  priority?: number;
};

/** Minimal structural typing for the dynamically imported Codex SDK client. */
type CodexSdkClient = {
  startThread: (options?: Record<string, unknown>) => CodexThread;
  resumeThread: (sessionId: string, options?: Record<string, unknown>) => CodexThread;
};

/** Structural type for one Codex conversation thread. */
type CodexThread = {
  runStreamed: (
    prompt: string,
    options?: {
      signal?: AbortSignal;
    },
  ) => Promise<{
    events: AsyncIterable<unknown>;
  }>;
};

/** Shape of the dynamically imported '@openai/codex-sdk' module. */
type CodexSdkModule = {
  Codex: new () => CodexSdkClient;
};
|
||||
|
||||
/**
|
||||
* Codex SDK provider implementation.
|
||||
*/
|
||||
export class CodexProvider extends BaseSdkProvider {
|
||||
constructor() {
|
||||
super('codex', {
|
||||
supportsRuntimePermissionRequests: false,
|
||||
supportsThinkingModeControl: true,
|
||||
supportsModelSwitching: true,
|
||||
supportsSessionResume: true,
|
||||
supportsSessionStop: true,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads codex models from ~/.codex/models_cache.json.
|
||||
*/
|
||||
async listModels(): Promise<ProviderModel[]> {
|
||||
const modelCachePath = path.join(os.homedir(), '.codex', 'models_cache.json');
|
||||
let content: string;
|
||||
try {
|
||||
content = await readFile(modelCachePath, 'utf8');
|
||||
} catch (error) {
|
||||
const code = (error as NodeJS.ErrnoException)?.code;
|
||||
if (code === 'ENOENT') {
|
||||
throw new AppError('Codex model cache was not found. Expected ~/.codex/models_cache.json.', {
|
||||
code: 'CODEX_MODEL_CACHE_NOT_FOUND',
|
||||
statusCode: 404,
|
||||
});
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
|
||||
const parsed = JSON.parse(content) as { models?: CodexModelCacheEntry[] };
|
||||
|
||||
const models = parsed.models ?? [];
|
||||
return models
|
||||
.filter((entry) => Boolean(entry.slug))
|
||||
.map((entry) => ({
|
||||
value: entry.slug as string,
|
||||
displayName: entry.display_name ?? entry.slug ?? 'unknown',
|
||||
description: entry.description,
|
||||
default: entry.priority === 1,
|
||||
supportsThinkingModes: Boolean(entry.supported_reasoning_levels?.length),
|
||||
supportedThinkingModes: entry.supported_reasoning_levels
|
||||
?.map((level) => level.effort)
|
||||
.filter((effort): effort is string => typeof effort === 'string'),
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a Codex thread execution and wires abort support.
|
||||
*/
|
||||
protected async createSdkExecution(input: CodexExecutionInput): Promise<{
|
||||
stream: AsyncIterable<unknown>;
|
||||
stop: () => Promise<boolean>;
|
||||
}> {
|
||||
const sdkModule = await this.loadCodexSdkModule();
|
||||
const client = new sdkModule.Codex();
|
||||
|
||||
const threadOptions: Record<string, unknown> = {
|
||||
model: input.model,
|
||||
workingDirectory: input.workspacePath,
|
||||
modelReasoningEffort: input.thinkingMode,
|
||||
};
|
||||
|
||||
const thread = input.isResume
|
||||
? client.resumeThread(input.sessionId, threadOptions)
|
||||
: client.startThread(threadOptions);
|
||||
|
||||
const abortController = new AbortController();
|
||||
const streamedTurn = await thread.runStreamed(input.prompt, {
|
||||
signal: abortController.signal,
|
||||
});
|
||||
|
||||
return {
|
||||
stream: streamedTurn.events,
|
||||
stop: async () => {
|
||||
abortController.abort('Session stop requested');
|
||||
return true;
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalizes Codex stream events into the shared event shape.
|
||||
*/
|
||||
protected mapSdkEvent(rawEvent: unknown): ProviderSessionEvent | null {
|
||||
if (typeof rawEvent !== 'object' || rawEvent === null) {
|
||||
return {
|
||||
timestamp: new Date().toISOString(),
|
||||
channel: 'sdk',
|
||||
message: String(rawEvent),
|
||||
};
|
||||
}
|
||||
|
||||
const record = rawEvent as Record<string, unknown>;
|
||||
const message = typeof record.type === 'string' ? record.type : 'codex_event';
|
||||
|
||||
return {
|
||||
timestamp: new Date().toISOString(),
|
||||
channel: 'sdk',
|
||||
message,
|
||||
data: rawEvent,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Dynamically imports the Codex SDK to support environments where it is optional.
|
||||
*/
|
||||
private async loadCodexSdkModule(): Promise<CodexSdkModule> {
|
||||
try {
|
||||
const sdkModule = (await import('@openai/codex-sdk')) as unknown as CodexSdkModule;
|
||||
if (!sdkModule?.Codex) {
|
||||
throw new Error('Codex SDK did not export "Codex".');
|
||||
}
|
||||
return sdkModule;
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : 'Failed to import Codex SDK';
|
||||
throw new AppError(`Codex SDK is unavailable: ${message}`, {
|
||||
code: 'CODEX_SDK_UNAVAILABLE',
|
||||
statusCode: 503,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
123
server/src/modules/llm/providers/cursor.provider.ts
Normal file
123
server/src/modules/llm/providers/cursor.provider.ts
Normal file
@@ -0,0 +1,123 @@
|
||||
import { BaseCliProvider } from '@/modules/llm/providers/base-cli.provider.js';
|
||||
import type { ProviderModel, StartSessionInput } from '@/modules/llm/providers/provider.interface.js';
|
||||
|
||||
/**
 * Launch payload for cursor CLI invocations: StartSessionInput with the
 * session id resolved and a flag distinguishing resume from a fresh start.
 * (Presumably populated by BaseCliProvider — confirm against the base class.)
 */
type CursorExecutionInput = StartSessionInput & {
  sessionId: string;
  isResume: boolean;
};

// Strips SGR color sequences (e.g. "\u001b[32m") from CLI output before
// parsing. Only "m"-terminated escape sequences are handled.
const ANSI_REGEX =
  // biome-ignore lint/suspicious/noControlCharactersInRegex: ANSI escape stripping.
  /\u001b\[[0-9;]*m/g;
|
||||
|
||||
/**
|
||||
* Cursor CLI provider implementation.
|
||||
*/
|
||||
export class CursorProvider extends BaseCliProvider {
|
||||
constructor() {
|
||||
super('cursor', {
|
||||
supportsRuntimePermissionRequests: false,
|
||||
supportsThinkingModeControl: false,
|
||||
supportsModelSwitching: true,
|
||||
supportsSessionResume: true,
|
||||
supportsSessionStop: true,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Lists cursor models by parsing `cursor-agent --list-models`.
|
||||
*/
|
||||
async listModels(): Promise<ProviderModel[]> {
|
||||
const output = await this.runCommandForOutput('cursor-agent', ['--list-models']);
|
||||
return this.parseModelsOutput(output);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates the command invocation for cursor start/resume flows.
|
||||
*/
|
||||
protected createCliInvocation(input: CursorExecutionInput): {
|
||||
command: string;
|
||||
args: string[];
|
||||
cwd?: string;
|
||||
} {
|
||||
const args = ['--print', '--trust', '--output-format', 'stream-json'];
|
||||
|
||||
if (input.allowYolo) {
|
||||
args.push('--yolo');
|
||||
}
|
||||
|
||||
if (input.model) {
|
||||
args.push('--model', input.model);
|
||||
}
|
||||
|
||||
if (input.isResume) {
|
||||
args.push('--resume', input.sessionId);
|
||||
}
|
||||
|
||||
args.push(input.prompt);
|
||||
|
||||
return {
|
||||
command: 'cursor-agent',
|
||||
args,
|
||||
cwd: input.workspacePath,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses full model-list output into normalized model entries.
|
||||
*/
|
||||
private parseModelsOutput(output: string): ProviderModel[] {
|
||||
const models: ProviderModel[] = [];
|
||||
const lines = output.replace(ANSI_REGEX, '').split(/\r?\n/);
|
||||
|
||||
for (const line of lines) {
|
||||
const parsed = this.parseModelLine(line);
|
||||
if (!parsed) {
|
||||
continue;
|
||||
}
|
||||
models.push(parsed);
|
||||
}
|
||||
|
||||
return models;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses one cursor model line.
|
||||
*/
|
||||
private parseModelLine(line: string): ProviderModel | null {
|
||||
const trimmed = line.trim();
|
||||
if (
|
||||
!trimmed ||
|
||||
trimmed === 'Available models' ||
|
||||
trimmed.startsWith('Loading models') ||
|
||||
trimmed.startsWith('Tip:')
|
||||
) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const match = trimmed.match(/^(.+?)\s+-\s+(.+)$/);
|
||||
if (!match) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const value = match[1].trim();
|
||||
const descriptionRaw = match[2].trim();
|
||||
|
||||
const current = /\(current\)/i.test(descriptionRaw);
|
||||
const defaultModel = /\(default\)/i.test(descriptionRaw);
|
||||
const description = descriptionRaw
|
||||
.replace(/\s*\((current|default)\)/gi, '')
|
||||
.replace(/\s{2,}/g, ' ')
|
||||
.trim();
|
||||
|
||||
return {
|
||||
value,
|
||||
displayName: value,
|
||||
description,
|
||||
current,
|
||||
default: defaultModel,
|
||||
supportsThinkingModes: false,
|
||||
supportedThinkingModes: [],
|
||||
};
|
||||
}
|
||||
}
|
||||
66
server/src/modules/llm/providers/gemini.provider.ts
Normal file
66
server/src/modules/llm/providers/gemini.provider.ts
Normal file
@@ -0,0 +1,66 @@
|
||||
import { BaseCliProvider } from '@/modules/llm/providers/base-cli.provider.js';
|
||||
import type { ProviderModel, StartSessionInput } from '@/modules/llm/providers/provider.interface.js';
|
||||
|
||||
/**
 * Launch payload for gemini CLI invocations: StartSessionInput with the
 * session id resolved and a flag distinguishing resume from a fresh start.
 */
type GeminiExecutionInput = StartSessionInput & {
  sessionId: string;
  isResume: boolean;
};
|
||||
|
||||
// Static, curated Gemini model catalog (sourced from the refactor doc — see
// listModels). NOTE(review): keep in sync with the models the gemini CLI
// actually accepts; there is no runtime discovery here.
const GEMINI_MODELS: ProviderModel[] = [
  { value: 'gemini-3.1-pro-preview', displayName: 'Gemini 3.1 Pro Preview' },
  { value: 'gemini-3-pro-preview', displayName: 'Gemini 3 Pro Preview' },
  { value: 'gemini-3-flash-preview', displayName: 'Gemini 3 Flash Preview' },
  { value: 'gemini-2.5-flash', displayName: 'Gemini 2.5 Flash' },
  { value: 'gemini-2.5-pro', displayName: 'Gemini 2.5 Pro' },
  { value: 'gemini-2.0-flash-lite', displayName: 'Gemini 2.0 Flash Lite' },
  { value: 'gemini-2.0-flash', displayName: 'Gemini 2.0 Flash' },
  { value: 'gemini-2.0-pro-exp', displayName: 'Gemini 2.0 Pro Experimental' },
  { value: 'gemini-2.0-flash-thinking-exp', displayName: 'Gemini 2.0 Flash Thinking' },
];
|
||||
|
||||
/**
|
||||
* Gemini CLI provider implementation.
|
||||
*/
|
||||
export class GeminiProvider extends BaseCliProvider {
|
||||
constructor() {
|
||||
super('gemini', {
|
||||
supportsRuntimePermissionRequests: false,
|
||||
supportsThinkingModeControl: false,
|
||||
supportsModelSwitching: true,
|
||||
supportsSessionResume: true,
|
||||
supportsSessionStop: true,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns curated Gemini model options from the refactor doc.
|
||||
*/
|
||||
async listModels(): Promise<ProviderModel[]> {
|
||||
return GEMINI_MODELS;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates the command invocation for gemini start/resume flows.
|
||||
*/
|
||||
protected createCliInvocation(input: GeminiExecutionInput): {
|
||||
command: string;
|
||||
args: string[];
|
||||
cwd?: string;
|
||||
} {
|
||||
const args = ['--prompt', input.prompt, '--output-format', 'stream-json'];
|
||||
|
||||
if (input.model) {
|
||||
args.push('--model', input.model);
|
||||
}
|
||||
|
||||
if (input.isResume) {
|
||||
args.push('--resume', input.sessionId);
|
||||
}
|
||||
|
||||
return {
|
||||
command: 'gemini',
|
||||
args,
|
||||
cwd: input.workspacePath,
|
||||
};
|
||||
}
|
||||
}
|
||||
103
server/src/modules/llm/providers/provider.interface.ts
Normal file
103
server/src/modules/llm/providers/provider.interface.ts
Normal file
@@ -0,0 +1,103 @@
|
||||
import type { LLMProvider } from '@/shared/types/app.js';
|
||||
|
||||
/** Whether a provider runs through a vendor SDK or by spawning a CLI. */
export type ProviderExecutionFamily = 'sdk' | 'cli';

/** Lifecycle states a provider session moves through. */
export type ProviderSessionStatus = 'running' | 'completed' | 'failed' | 'stopped';

/** Runtime tool-permission policy: interactive, auto-approve, or auto-reject. */
export type RuntimePermissionMode = 'ask' | 'allow' | 'deny';

/**
 * Advertises optional provider behaviors so route/service code can gate features.
 */
export type ProviderCapabilities = {
  supportsRuntimePermissionRequests: boolean;
  supportsThinkingModeControl: boolean;
  supportsModelSwitching: boolean;
  supportsSessionResume: boolean;
  supportsSessionStop: boolean;
};

/**
 * Provider model descriptor normalized for frontend consumption.
 */
export type ProviderModel = {
  /** Stable identifier passed back when selecting the model. */
  value: string;
  /** Human-readable label. */
  displayName: string;
  description?: string;
  /** True when the provider treats this as its default model. */
  default?: boolean;
  /** True when the model is currently selected. */
  current?: boolean;
  supportsThinkingModes?: boolean;
  supportedThinkingModes?: string[];
};

/**
 * Unified in-memory event emitted while a provider session runs.
 */
export type ProviderSessionEvent = {
  /** ISO-8601 event creation time. */
  timestamp: string;
  /** Origin of the event within the session pipeline. */
  channel: 'sdk' | 'stdout' | 'stderr' | 'json' | 'system' | 'error';
  /** Optional short human-readable summary. */
  message?: string;
  /** Optional raw payload (e.g. the original SDK/CLI event). */
  data?: unknown;
};

/**
 * Common launch/resume payload consumed by all providers.
 */
export type StartSessionInput = {
  prompt: string;
  /** Working directory the session should run in. */
  workspacePath?: string;
  /** Omit on launch to let the provider generate one; required on resume. */
  sessionId?: string;
  model?: string;
  thinkingMode?: string;
  /** Runtime tool-permission policy; relevant when the provider supports it. */
  runtimePermissionMode?: RuntimePermissionMode;
  /** Opt-in to unattended execution for CLI providers that accept a yolo flag. */
  allowYolo?: boolean;
};

/**
 * Snapshot shape exposed externally for a provider session.
 */
export type ProviderSessionSnapshot = {
  sessionId: string;
  provider: LLMProvider;
  family: ProviderExecutionFamily;
  status: ProviderSessionStatus;
  /** ISO-8601 timestamps. */
  startedAt: string;
  endedAt?: string;
  model?: string;
  thinkingMode?: string;
  /** Events accumulated so far; grows while the session is still running. */
  events: ProviderSessionEvent[];
  /** Failure detail when status === 'failed'. */
  error?: string;
};

/**
 * Provider contract that both SDK and CLI families implement.
 */
export interface IProvider {
  readonly id: LLMProvider;
  readonly family: ProviderExecutionFamily;
  readonly capabilities: ProviderCapabilities;

  listModels(): Promise<ProviderModel[]>;

  launchSession(input: StartSessionInput): Promise<ProviderSessionSnapshot>;
  resumeSession(input: StartSessionInput & { sessionId: string }): Promise<ProviderSessionSnapshot>;

  stopSession(sessionId: string): Promise<boolean>;
  setSessionModel(sessionId: string, model: string): Promise<void>;
  setSessionThinkingMode(sessionId: string, thinkingMode: string): Promise<void>;

  /** Returns null when the session id is unknown. */
  getSession(sessionId: string): ProviderSessionSnapshot | null;
  listSessions(): ProviderSessionSnapshot[];
  /** Resolves once the session finishes; null when the id is unknown. */
  waitForSession(sessionId: string): Promise<ProviderSessionSnapshot | null>;
}

/**
 * Internal mutable session state used by provider base classes.
 */
export type MutableProviderSession = Omit<ProviderSessionSnapshot, 'events'> & {
  events: ProviderSessionEvent[];
  /** Resolves when background stream/process consumption ends. */
  completion: Promise<void>;
  /** Terminates the underlying execution. */
  stop: () => Promise<boolean>;
  /** Present only when the execution supports mid-session switching. */
  setModel?: (model: string) => Promise<void>;
  setThinkingMode?: (thinkingMode: string) => Promise<void>;
};
|
||||
@@ -0,0 +1,84 @@
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
|
||||
import { sessionsDb } from '@/shared/database/repositories/sessions.db.js';
|
||||
import {
|
||||
buildLookupMap,
|
||||
extractFirstValidJsonlData,
|
||||
findFilesRecursivelyCreatedAfter,
|
||||
normalizeSessionName,
|
||||
readFileTimestamps,
|
||||
} from '@/modules/llm/session-indexers/session-indexer.utils.js';
|
||||
import type { ISessionIndexer } from '@/modules/llm/session-indexers/session-indexer.interface.js';
|
||||
|
||||
type ParsedSession = {
|
||||
sessionId: string;
|
||||
workspacePath: string;
|
||||
sessionName?: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* Session indexer for Claude transcript artifacts.
|
||||
*/
|
||||
export class ClaudeSessionIndexer implements ISessionIndexer {
|
||||
readonly provider = 'claude' as const;
|
||||
|
||||
/**
|
||||
* Scans ~/.claude projects and upserts discovered sessions into DB.
|
||||
*/
|
||||
async synchronize(lastScanAt: Date | null): Promise<number> {
|
||||
const claudeHome = path.join(os.homedir(), '.claude');
|
||||
const nameMap = await buildLookupMap(path.join(claudeHome, 'history.jsonl'), 'sessionId', 'display');
|
||||
const files = await findFilesRecursivelyCreatedAfter(
|
||||
path.join(claudeHome, 'projects'),
|
||||
'.jsonl',
|
||||
lastScanAt,
|
||||
);
|
||||
|
||||
let processed = 0;
|
||||
for (const filePath of files) {
|
||||
const parsed = await this.processSessionFile(filePath, nameMap);
|
||||
if (!parsed) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const timestamps = await readFileTimestamps(filePath);
|
||||
sessionsDb.createSession(
|
||||
parsed.sessionId,
|
||||
this.provider,
|
||||
parsed.workspacePath,
|
||||
parsed.sessionName,
|
||||
timestamps.createdAt,
|
||||
timestamps.updatedAt,
|
||||
filePath,
|
||||
);
|
||||
processed += 1;
|
||||
}
|
||||
|
||||
return processed;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts session metadata from one Claude JSONL session file.
|
||||
*/
|
||||
private async processSessionFile(
|
||||
filePath: string,
|
||||
nameMap: Map<string, string>,
|
||||
): Promise<ParsedSession | null> {
|
||||
return extractFirstValidJsonlData(filePath, (rawData) => {
|
||||
const data = rawData as Record<string, unknown>;
|
||||
const sessionId = typeof data.sessionId === 'string' ? data.sessionId : undefined;
|
||||
const workspacePath = typeof data.cwd === 'string' ? data.cwd : undefined;
|
||||
|
||||
if (!sessionId || !workspacePath) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
sessionId,
|
||||
workspacePath,
|
||||
sessionName: normalizeSessionName(nameMap.get(sessionId), 'Untitled Claude Session'),
|
||||
};
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,85 @@
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
|
||||
import { sessionsDb } from '@/shared/database/repositories/sessions.db.js';
|
||||
import {
|
||||
buildLookupMap,
|
||||
extractFirstValidJsonlData,
|
||||
findFilesRecursivelyCreatedAfter,
|
||||
normalizeSessionName,
|
||||
readFileTimestamps,
|
||||
} from '@/modules/llm/session-indexers/session-indexer.utils.js';
|
||||
import type { ISessionIndexer } from '@/modules/llm/session-indexers/session-indexer.interface.js';
|
||||
|
||||
type ParsedSession = {
|
||||
sessionId: string;
|
||||
workspacePath: string;
|
||||
sessionName?: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* Session indexer for Codex transcript artifacts.
|
||||
*/
|
||||
export class CodexSessionIndexer implements ISessionIndexer {
|
||||
readonly provider = 'codex' as const;
|
||||
|
||||
/**
|
||||
* Scans ~/.codex sessions and upserts discovered sessions into DB.
|
||||
*/
|
||||
async synchronize(lastScanAt: Date | null): Promise<number> {
|
||||
const codexHome = path.join(os.homedir(), '.codex');
|
||||
const nameMap = await buildLookupMap(path.join(codexHome, 'session_index.jsonl'), 'id', 'thread_name');
|
||||
const files = await findFilesRecursivelyCreatedAfter(
|
||||
path.join(codexHome, 'sessions'),
|
||||
'.jsonl',
|
||||
lastScanAt,
|
||||
);
|
||||
|
||||
let processed = 0;
|
||||
for (const filePath of files) {
|
||||
const parsed = await this.processSessionFile(filePath, nameMap);
|
||||
if (!parsed) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const timestamps = await readFileTimestamps(filePath);
|
||||
sessionsDb.createSession(
|
||||
parsed.sessionId,
|
||||
this.provider,
|
||||
parsed.workspacePath,
|
||||
parsed.sessionName,
|
||||
timestamps.createdAt,
|
||||
timestamps.updatedAt,
|
||||
filePath,
|
||||
);
|
||||
processed += 1;
|
||||
}
|
||||
|
||||
return processed;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts session metadata from one Codex JSONL session file.
|
||||
*/
|
||||
private async processSessionFile(
|
||||
filePath: string,
|
||||
nameMap: Map<string, string>,
|
||||
): Promise<ParsedSession | null> {
|
||||
return extractFirstValidJsonlData(filePath, (rawData) => {
|
||||
const data = rawData as Record<string, unknown>;
|
||||
const payload = data.payload as Record<string, unknown> | undefined;
|
||||
const sessionId = typeof payload?.id === 'string' ? payload.id : undefined;
|
||||
const workspacePath = typeof payload?.cwd === 'string' ? payload.cwd : undefined;
|
||||
|
||||
if (!sessionId || !workspacePath) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
sessionId,
|
||||
workspacePath,
|
||||
sessionName: normalizeSessionName(nameMap.get(sessionId), 'Untitled Codex Session'),
|
||||
};
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,138 @@
|
||||
import crypto from 'node:crypto';
|
||||
import fs from 'node:fs';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
import readline from 'node:readline';
|
||||
|
||||
import { sessionsDb } from '@/shared/database/repositories/sessions.db.js';
|
||||
import {
|
||||
extractFirstValidJsonlData,
|
||||
findFilesRecursivelyCreatedAfter,
|
||||
listDirectoryEntriesSafe,
|
||||
normalizeSessionName,
|
||||
readFileTimestamps,
|
||||
} from '@/modules/llm/session-indexers/session-indexer.utils.js';
|
||||
import type { ISessionIndexer } from '@/modules/llm/session-indexers/session-indexer.interface.js';
|
||||
|
||||
type ParsedSession = {
|
||||
sessionId: string;
|
||||
workspacePath: string;
|
||||
sessionName?: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* Session indexer for Cursor transcript artifacts.
|
||||
*/
|
||||
export class CursorSessionIndexer implements ISessionIndexer {
|
||||
readonly provider = 'cursor' as const;
|
||||
|
||||
  /**
   * Scans Cursor chats and upserts discovered sessions into DB.
   *
   * Walks ~/.cursor/projects, resolves each project's workspace path from its
   * worker.log, and then indexes that workspace's chat transcripts under
   * ~/.cursor/chats/<md5(workspacePath)>.
   *
   * @param lastScanAt - Only chat files created after this point are
   *                     considered (null scans everything).
   * @returns Number of sessions written to the database.
   */
  async synchronize(lastScanAt: Date | null): Promise<number> {
    const cursorHome = path.join(os.homedir(), '.cursor');
    const projectsDir = path.join(cursorHome, 'projects');
    const projectEntries = await listDirectoryEntriesSafe(projectsDir);
    // Multiple project directories can resolve to the same workspace; make
    // sure each workspace's chat directory is processed only once.
    const seenWorkspacePaths = new Set<string>();

    let processed = 0;
    for (const entry of projectEntries) {
      if (!entry.isDirectory()) {
        continue;
      }

      const workerLogPath = path.join(projectsDir, entry.name, 'worker.log');
      const workspacePath = await this.extractWorkspacePathFromWorkerLog(workerLogPath);
      if (!workspacePath || seenWorkspacePaths.has(workspacePath)) {
        continue;
      }

      seenWorkspacePaths.add(workspacePath);
      // Chat directories are keyed by the md5 hash of the workspace path.
      const workspaceHash = this.md5(workspacePath);
      const chatsDir = path.join(cursorHome, 'chats', workspaceHash);
      const files = await findFilesRecursivelyCreatedAfter(chatsDir, '.jsonl', lastScanAt);

      for (const filePath of files) {
        const parsed = await this.processSessionFile(filePath);
        if (!parsed) {
          continue;
        }

        const timestamps = await readFileTimestamps(filePath);
        sessionsDb.createSession(
          parsed.sessionId,
          this.provider,
          parsed.workspacePath,
          parsed.sessionName,
          timestamps.createdAt,
          timestamps.updatedAt,
          filePath,
        );
        processed += 1;
      }
    }

    return processed;
  }
|
||||
|
||||
/**
|
||||
* Produces the same workspace hash Cursor uses in chat directory names.
|
||||
*/
|
||||
private md5(input: string): string {
|
||||
return crypto.createHash('md5').update(input).digest('hex');
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts workspace path from Cursor worker.log.
|
||||
*/
|
||||
private async extractWorkspacePathFromWorkerLog(filePath: string): Promise<string | null> {
|
||||
try {
|
||||
const fileStream = fs.createReadStream(filePath, { encoding: 'utf8' });
|
||||
const lineReader = readline.createInterface({ input: fileStream, crlfDelay: Infinity });
|
||||
|
||||
for await (const line of lineReader) {
|
||||
const match = line.match(/workspacePath=(.*)$/);
|
||||
const workspacePath = match?.[1]?.trim();
|
||||
if (workspacePath) {
|
||||
lineReader.close();
|
||||
fileStream.close();
|
||||
return workspacePath;
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Missing worker logs are valid for partial/incomplete session data.
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts session metadata from one Cursor JSONL session file.
|
||||
*/
|
||||
private async processSessionFile(filePath: string): Promise<ParsedSession | null> {
|
||||
const sessionId = path.basename(filePath, '.jsonl');
|
||||
const grandparentDir = path.dirname(path.dirname(filePath));
|
||||
const workerLogPath = path.join(grandparentDir, 'worker.log');
|
||||
const workspacePath = await this.extractWorkspacePathFromWorkerLog(workerLogPath);
|
||||
|
||||
if (!workspacePath) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return extractFirstValidJsonlData(filePath, (rawData) => {
|
||||
const data = rawData as Record<string, any>;
|
||||
if (data.role !== 'user') {
|
||||
return null;
|
||||
}
|
||||
|
||||
const text = typeof data.message?.content?.[0]?.text === 'string' ? data.message.content[0].text : '';
|
||||
const firstLine = text.replace(/<\/?user_query>/g, '').trim().split('\n')[0];
|
||||
|
||||
return {
|
||||
sessionId,
|
||||
workspacePath,
|
||||
sessionName: normalizeSessionName(firstLine, 'Untitled Cursor Session'),
|
||||
};
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,128 @@
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
import { readFile } from 'node:fs/promises';
|
||||
|
||||
import { sessionsDb } from '@/shared/database/repositories/sessions.db.js';
|
||||
import {
|
||||
findFilesRecursivelyCreatedAfter,
|
||||
normalizeSessionName,
|
||||
readFileTimestamps,
|
||||
} from '@/modules/llm/session-indexers/session-indexer.utils.js';
|
||||
import type { ISessionIndexer } from '@/modules/llm/session-indexers/session-indexer.interface.js';
|
||||
|
||||
// Minimal session identity extracted from one transcript artifact.
type ParsedSession = {
  sessionId: string;
  workspacePath: string;
  sessionName?: string;
};

/**
 * Session indexer for Gemini transcript artifacts.
 *
 * Scans JSON files under `~/.gemini/sessions` (legacy layout) and
 * `~/.gemini/tmp` (where only files inside a `chats` directory count).
 */
export class GeminiSessionIndexer implements ISessionIndexer {
  readonly provider = 'gemini' as const;

  /**
   * Scans Gemini session JSON files and upserts discovered sessions into DB.
   *
   * @param lastScanAt - Only files created after this timestamp are processed;
   *   null requests a full scan.
   * @returns Number of sessions written to the database.
   */
  async synchronize(lastScanAt: Date | null): Promise<number> {
    const geminiHome = path.join(os.homedir(), '.gemini');
    const legacySessionFiles = await findFilesRecursivelyCreatedAfter(
      path.join(geminiHome, 'sessions'),
      '.json',
      lastScanAt,
    );
    const tempFiles = await findFilesRecursivelyCreatedAfter(
      path.join(geminiHome, 'tmp'),
      '.json',
      lastScanAt,
    );
    const files = [...legacySessionFiles, ...tempFiles];

    let processed = 0;
    for (const filePath of files) {
      // Inside ~/.gemini/tmp, only files under a "chats" directory are session artifacts.
      if (
        filePath.startsWith(path.join(geminiHome, 'tmp')) &&
        !filePath.includes(`${path.sep}chats${path.sep}`)
      ) {
        continue;
      }

      const parsed = await this.processSessionFile(filePath);
      if (!parsed) {
        continue;
      }

      const timestamps = await readFileTimestamps(filePath);
      sessionsDb.createSession(
        parsed.sessionId,
        this.provider,
        parsed.workspacePath,
        parsed.sessionName,
        timestamps.createdAt,
        timestamps.updatedAt,
        filePath,
      );
      processed += 1;
    }

    return processed;
  }

  /**
   * Extracts session metadata from one Gemini JSON artifact.
   *
   * The session id comes from `sessionId` or `id`; the workspace path comes
   * from `projectPath` or, for tmp/chats artifacts, from a `.project_root`
   * marker file next to the chats directory. The session name is derived from
   * the first message's content.
   *
   * @returns Parsed session identity, or null when required fields are missing
   *   or the file is unreadable / not valid JSON.
   */
  private async processSessionFile(filePath: string): Promise<ParsedSession | null> {
    try {
      const content = await readFile(filePath, 'utf8');
      const data = JSON.parse(content) as Record<string, any>;

      const sessionId =
        typeof data.sessionId === 'string'
          ? data.sessionId
          : typeof data.id === 'string'
            ? data.id
            : undefined;
      if (!sessionId) {
        return null;
      }

      let workspacePath = typeof data.projectPath === 'string' ? data.projectPath : '';

      // Fallback for tmp/chats layout: <workspaceDir>/.project_root stores the real path.
      if (!workspacePath && filePath.includes(`${path.sep}chats${path.sep}`)) {
        const chatsDir = path.dirname(filePath);
        const workspaceDir = path.dirname(chatsDir);
        const projectRootPath = path.join(workspaceDir, '.project_root');

        try {
          const rootContent = await readFile(projectRootPath, 'utf8');
          workspacePath = rootContent.trim();
        } catch {
          // Some Gemini artifacts do not ship a .project_root marker.
        }
      }

      if (!workspacePath) {
        return null;
      }

      const messages = Array.isArray(data.messages) ? data.messages : [];
      const firstMessage = messages[0] as Record<string, any> | undefined;
      let rawName: string | undefined;

      // Message content may be either a rich-content array or a plain string.
      if (Array.isArray(firstMessage?.content) && typeof firstMessage.content[0]?.text === 'string') {
        rawName = firstMessage.content[0].text;
      } else if (typeof firstMessage?.content === 'string') {
        rawName = firstMessage.content;
      }

      return {
        sessionId,
        workspacePath,
        sessionName: normalizeSessionName(rawName, 'New Gemini Chat'),
      };
    } catch {
      return null;
    }
  }
}
|
||||
15
server/src/modules/llm/session-indexers/index.ts
Normal file
15
server/src/modules/llm/session-indexers/index.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
import type { ISessionIndexer } from '@/modules/llm/session-indexers/session-indexer.interface.js';
|
||||
import { ClaudeSessionIndexer } from '@/modules/llm/session-indexers/claude.session-indexer.js';
|
||||
import { CodexSessionIndexer } from '@/modules/llm/session-indexers/codex.session-indexer.js';
|
||||
import { CursorSessionIndexer } from '@/modules/llm/session-indexers/cursor.session-indexer.js';
|
||||
import { GeminiSessionIndexer } from '@/modules/llm/session-indexers/gemini.session-indexer.js';
|
||||
|
||||
/**
 * Provider-specific session indexers used by the sync orchestrator.
 * Instantiated once at module load; one indexer per supported provider.
 */
export const sessionIndexers: ISessionIndexer[] = [
  new ClaudeSessionIndexer(),
  new CodexSessionIndexer(),
  new CursorSessionIndexer(),
  new GeminiSessionIndexer(),
];
|
||||
@@ -0,0 +1,13 @@
|
||||
import type { LLMProvider } from '@/shared/types/app.js';
|
||||
|
||||
/**
 * Contract for provider-specific session indexing logic.
 */
export interface ISessionIndexer {
  // Provider this indexer is responsible for (e.g. 'claude', 'cursor').
  readonly provider: LLMProvider;

  /**
   * Scans provider session artifacts and upserts discovered sessions into DB.
   *
   * @param lastScanAt - Lower bound for artifact creation time; null requests a full scan.
   * @returns Number of sessions processed during this run.
   */
  synchronize(lastScanAt: Date | null): Promise<number>;
}
|
||||
154
server/src/modules/llm/session-indexers/session-indexer.utils.ts
Normal file
154
server/src/modules/llm/session-indexers/session-indexer.utils.ts
Normal file
@@ -0,0 +1,154 @@
|
||||
import fs from 'node:fs';
|
||||
import fsp from 'node:fs/promises';
|
||||
import path from 'node:path';
|
||||
import readline from 'node:readline';
|
||||
/**
|
||||
* Keeps extracted session names compact and UI-safe.
|
||||
*/
|
||||
export function normalizeSessionName(rawValue: string | undefined, fallback: string): string {
|
||||
const normalized = (rawValue ?? '').replace(/\s+/g, ' ').trim();
|
||||
if (!normalized) {
|
||||
return fallback;
|
||||
}
|
||||
|
||||
return normalized.slice(0, 120);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns directory entries or an empty array when the directory does not exist.
|
||||
*/
|
||||
export async function listDirectoryEntriesSafe(
|
||||
directoryPath: string,
|
||||
): Promise<import('node:fs').Dirent[]> {
|
||||
try {
|
||||
return await fsp.readdir(directoryPath, { withFileTypes: true });
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds a lookup map from a JSONL index file by extracting a key/value pair per row.
|
||||
* The first occurrence of a key wins so we preserve earliest metadata.
|
||||
*/
|
||||
export async function buildLookupMap(
|
||||
filePath: string,
|
||||
keyField: string,
|
||||
valueField: string,
|
||||
): Promise<Map<string, string>> {
|
||||
const lookup = new Map<string, string>();
|
||||
|
||||
try {
|
||||
const fileStream = fs.createReadStream(filePath);
|
||||
const lineReader = readline.createInterface({ input: fileStream, crlfDelay: Infinity });
|
||||
|
||||
for await (const line of lineReader) {
|
||||
const trimmed = line.trim();
|
||||
if (!trimmed) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const parsed = JSON.parse(trimmed) as Record<string, unknown>;
|
||||
const key = parsed[keyField];
|
||||
const value = parsed[valueField];
|
||||
|
||||
if (typeof key === 'string' && typeof value === 'string' && !lookup.has(key)) {
|
||||
lookup.set(key, value);
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Missing index files are normal for users who have not used a provider yet.
|
||||
}
|
||||
|
||||
return lookup;
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively scans for files with a given extension and optionally filters
|
||||
* them to only files created after `lastScanAt`.
|
||||
*/
|
||||
export async function findFilesRecursivelyCreatedAfter(
|
||||
rootDir: string,
|
||||
extension: string,
|
||||
lastScanAt: Date | null,
|
||||
fileList: string[] = [],
|
||||
): Promise<string[]> {
|
||||
try {
|
||||
const entries = await fsp.readdir(rootDir, { withFileTypes: true });
|
||||
for (const entry of entries) {
|
||||
const fullPath = path.join(rootDir, entry.name);
|
||||
|
||||
if (entry.isDirectory()) {
|
||||
await findFilesRecursivelyCreatedAfter(fullPath, extension, lastScanAt, fileList);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!entry.isFile() || !entry.name.endsWith(extension)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!lastScanAt) {
|
||||
fileList.push(fullPath);
|
||||
continue;
|
||||
}
|
||||
|
||||
const stats = await fsp.stat(fullPath);
|
||||
if (stats.birthtime > lastScanAt) {
|
||||
fileList.push(fullPath);
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Missing provider directories should not fail the full sync.
|
||||
}
|
||||
|
||||
return fileList;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads JSONL rows until the extractor yields a valid session identity.
|
||||
*/
|
||||
export async function extractFirstValidJsonlData<T>(
|
||||
filePath: string,
|
||||
extractor: (parsedJson: unknown) => T | null | undefined,
|
||||
): Promise<T | null> {
|
||||
try {
|
||||
const fileStream = fs.createReadStream(filePath);
|
||||
const lineReader = readline.createInterface({ input: fileStream, crlfDelay: Infinity });
|
||||
|
||||
for await (const line of lineReader) {
|
||||
const trimmed = line.trim();
|
||||
if (!trimmed) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const parsed = JSON.parse(trimmed);
|
||||
const extracted = extractor(parsed);
|
||||
if (extracted) {
|
||||
lineReader.close();
|
||||
fileStream.close();
|
||||
return extracted;
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Ignore malformed session files and continue scanning.
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads filesystem timestamps for DB metadata fields.
|
||||
*/
|
||||
export async function readFileTimestamps(
|
||||
filePath: string,
|
||||
): Promise<{ createdAt?: string; updatedAt?: string }> {
|
||||
try {
|
||||
const stat = await fsp.stat(filePath);
|
||||
return {
|
||||
createdAt: stat.birthtime.toISOString(),
|
||||
updatedAt: stat.mtime.toISOString(),
|
||||
};
|
||||
} catch {
|
||||
return {};
|
||||
}
|
||||
}
|
||||
236
server/src/modules/llm/sessions.service.ts
Normal file
236
server/src/modules/llm/sessions.service.ts
Normal file
@@ -0,0 +1,236 @@
|
||||
import path from 'node:path';
|
||||
import fsp, { readFile } from 'node:fs/promises';
|
||||
|
||||
import { scanStateDb } from '@/shared/database/repositories/scan-state.db.js';
|
||||
import { sessionsDb } from '@/shared/database/repositories/sessions.db.js';
|
||||
import type { LLMProvider } from '@/shared/types/app.js';
|
||||
import { AppError } from '@/shared/utils/app-error.js';
|
||||
import { sessionIndexers } from '@/modules/llm/session-indexers/index.js';
|
||||
|
||||
type SyncResult = {
|
||||
processedByProvider: Record<LLMProvider, number>;
|
||||
failures: string[];
|
||||
};
|
||||
|
||||
type SessionHistoryPayload = {
|
||||
sessionId: string;
|
||||
provider: string;
|
||||
workspacePath: string;
|
||||
filePath: string;
|
||||
fileType: 'jsonl' | 'json';
|
||||
entries: unknown[];
|
||||
};
|
||||
|
||||
const SESSION_ID_PATTERN = /^[a-zA-Z0-9._-]{1,120}$/;
|
||||
|
||||
/**
|
||||
* Restricts session IDs before they are used in DB/filesystem operations.
|
||||
*/
|
||||
function sanitizeSessionId(sessionId: string): string {
|
||||
const value = String(sessionId).trim();
|
||||
if (!SESSION_ID_PATTERN.test(value)) {
|
||||
throw new AppError('Invalid session ID format.', {
|
||||
code: 'INVALID_SESSION_ID',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes one file if it exists.
|
||||
*/
|
||||
async function removeFileIfExists(filePath: string): Promise<boolean> {
|
||||
try {
|
||||
await fsp.unlink(filePath);
|
||||
return true;
|
||||
} catch (error) {
|
||||
const code = (error as NodeJS.ErrnoException).code;
|
||||
if (code === 'ENOENT') {
|
||||
return false;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses newline-delimited JSON files and preserves malformed lines as raw entries.
|
||||
*/
|
||||
const parseJsonl = (content: string): unknown[] => {
|
||||
const entries: unknown[] = [];
|
||||
const lines = content.split(/\r?\n/);
|
||||
|
||||
for (const line of lines) {
|
||||
const trimmed = line.trim();
|
||||
if (!trimmed) {
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
entries.push(JSON.parse(trimmed));
|
||||
} catch {
|
||||
entries.push({ raw: trimmed, parseError: true });
|
||||
}
|
||||
}
|
||||
|
||||
return entries;
|
||||
};
|
||||
|
||||
/**
|
||||
* Parses JSON files and normalizes object payloads into a single-element array.
|
||||
*/
|
||||
const parseJson = (content: string): unknown[] => {
|
||||
try {
|
||||
const parsed = JSON.parse(content) as unknown;
|
||||
return Array.isArray(parsed) ? parsed : [parsed];
|
||||
} catch {
|
||||
return [{ raw: content, parseError: true }];
|
||||
}
|
||||
};
|
||||
|
||||
/**
 * Orchestrates provider-specific session indexers and DB-path based cleanup.
 */
export const llmSessionsService = {
  /**
   * Lists indexed sessions from the shared DB, optionally scoped to one provider.
   *
   * @param provider - Optional provider name; omit to list all sessions.
   */
  listIndexedSessions(provider?: string) {
    const allSessions = sessionsDb.getAllSessions();
    if (!provider) {
      return allSessions;
    }

    return allSessions.filter((session) => session.provider === provider);
  },

  /**
   * Runs all provider indexers and updates `scan_state.last_scanned_at`.
   *
   * Indexers run concurrently; a rejecting indexer is reported in `failures`
   * without blocking the others.
   */
  async synchronizeSessions(): Promise<SyncResult> {
    const lastScanAt = scanStateDb.getLastScannedAt();
    const processedByProvider: Record<LLMProvider, number> = {
      claude: 0,
      codex: 0,
      cursor: 0,
      gemini: 0,
    };
    const failures: string[] = [];

    // allSettled so one provider's failure never aborts the other indexers.
    const results = await Promise.allSettled(
      sessionIndexers.map(async (indexer) => ({
        provider: indexer.provider,
        processed: await indexer.synchronize(lastScanAt),
      })),
    );

    for (const result of results) {
      if (result.status === 'fulfilled') {
        processedByProvider[result.value.provider] = result.value.processed;
        continue;
      }

      const reason = result.reason instanceof Error ? result.reason.message : String(result.reason);
      failures.push(reason);
    }

    // NOTE(review): the scan watermark advances even when some indexers failed,
    // so artifacts created during a failed run may be skipped next time — confirm intended.
    scanStateDb.updateLastScannedAt();

    return {
      processedByProvider,
      failures,
    };
  },

  /**
   * Runs one provider indexer and updates `scan_state.last_scanned_at`.
   *
   * @param provider - Provider whose indexer should run.
   * @param options - `fullRescan: true` ignores the stored watermark and scans everything.
   * @throws AppError SESSION_INDEXER_NOT_FOUND (500) when no indexer is registered.
   */
  async synchronizeProvider(
    provider: LLMProvider,
    options: { fullRescan?: boolean } = {},
  ): Promise<{ provider: LLMProvider; processed: number }> {
    const indexer = sessionIndexers.find((entry) => entry.provider === provider);
    if (!indexer) {
      throw new AppError(`No session indexer registered for provider "${provider}".`, {
        code: 'SESSION_INDEXER_NOT_FOUND',
        statusCode: 500,
      });
    }

    const lastScanAt = options.fullRescan ? null : scanStateDb.getLastScannedAt();
    const processed = await indexer.synchronize(lastScanAt);
    scanStateDb.updateLastScannedAt();

    return { provider, processed };
  },

  /**
   * Stores a user-provided display name for an indexed session.
   *
   * @throws AppError SESSION_NOT_FOUND (404) when the session is not indexed.
   */
  updateSessionCustomName(sessionId: string, sessionCustomName: string): void {
    const sessionMetadata = sessionsDb.getSessionById(sessionId);
    if (!sessionMetadata) {
      throw new AppError('Session not found.', {
        code: 'SESSION_NOT_FOUND',
        statusCode: 404,
      });
    }

    sessionsDb.updateSessionCustomName(sessionId, sessionCustomName);
  },

  /**
   * Deletes a session artifact using only DB `jsonl_path`, then removes the DB row.
   *
   * @param rawSessionId - Untrusted session id; validated before any I/O.
   * @returns Flags describing what was actually removed on disk and in the DB.
   * @throws AppError INVALID_SESSION_ID (400) on malformed input.
   */
  async deleteSessionArtifacts(rawSessionId: string): Promise<{
    sessionId: string;
    deletedFromDisk: boolean;
    deletedFromDatabase: boolean;
  }> {
    const sessionId = sanitizeSessionId(rawSessionId);
    const existingSession = sessionsDb.getSessionById(sessionId);
    const jsonlPath = existingSession?.jsonl_path ?? null;
    const deletedFromDisk = jsonlPath ? await removeFileIfExists(jsonlPath) : false;

    if (existingSession) {
      sessionsDb.deleteSession(sessionId);
    }

    return {
      sessionId,
      deletedFromDisk,
      deletedFromDatabase: Boolean(existingSession),
    };
  },

  /**
   * Reads session history directly from `sessions.jsonl_path` without legacy fetchers.
   *
   * @throws AppError SESSION_NOT_FOUND (404) when the session is not indexed.
   * @throws AppError SESSION_HISTORY_NOT_AVAILABLE (404) when no file path is recorded.
   */
  async getSessionHistory(sessionId: string): Promise<SessionHistoryPayload> {
    const session = sessionsDb.getSessionById(sessionId);
    if (!session) {
      throw new AppError(`Session "${sessionId}" was not found.`, {
        code: 'SESSION_NOT_FOUND',
        statusCode: 404,
      });
    }

    if (!session.jsonl_path) {
      throw new AppError(`Session "${sessionId}" does not have a history file path.`, {
        code: 'SESSION_HISTORY_NOT_AVAILABLE',
        statusCode: 404,
      });
    }

    const filePath = session.jsonl_path;
    const fileContent = await readFile(filePath, 'utf8');
    const extension = path.extname(filePath).toLowerCase();
    // Gemini transcripts (and any .json file) are whole-document JSON; others are JSONL.
    const isGeminiJson = session.provider === 'gemini' || extension === '.json';

    return {
      sessionId: session.session_id,
      provider: session.provider,
      workspacePath: session.workspace_path,
      filePath,
      fileType: isGeminiJson ? 'json' : 'jsonl',
      entries: isGeminiJson ? parseJson(fileContent) : parseJsonl(fileContent),
    };
  },
};
|
||||
131
server/src/modules/llm/sessions.watcher.ts
Normal file
131
server/src/modules/llm/sessions.watcher.ts
Normal file
@@ -0,0 +1,131 @@
|
||||
import chokidar from 'chokidar';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
import { promises as fsPromises } from 'node:fs';
|
||||
|
||||
import { llmSessionsService } from '@/modules/llm/sessions.service.js';
|
||||
import type { LLMProvider } from '@/shared/types/app.js';
|
||||
import { logger } from '@/shared/utils/logger.js';
|
||||
|
||||
// File system watchers for provider project/session folders
|
||||
const PROVIDER_WATCH_PATHS: Array<{ provider: LLMProvider; rootPath: string }> = [
|
||||
{
|
||||
provider: 'claude',
|
||||
rootPath: path.join(os.homedir(), '.claude', 'projects'),
|
||||
},
|
||||
{
|
||||
provider: 'cursor',
|
||||
rootPath: path.join(os.homedir(), '.cursor', 'chats'),
|
||||
},
|
||||
{
|
||||
provider: 'codex',
|
||||
rootPath: path.join(os.homedir(), '.codex', 'sessions'),
|
||||
},
|
||||
{
|
||||
provider: 'gemini',
|
||||
rootPath: path.join(os.homedir(), '.gemini', 'sessions'),
|
||||
},
|
||||
{
|
||||
provider: 'gemini',
|
||||
rootPath: path.join(os.homedir(), '.gemini', 'tmp'),
|
||||
},
|
||||
];
|
||||
|
||||
const WATCHER_IGNORED_PATTERNS = [
|
||||
'**/node_modules/**',
|
||||
'**/.git/**',
|
||||
'**/dist/**',
|
||||
'**/build/**',
|
||||
'**/*.tmp',
|
||||
'**/*.swp',
|
||||
'**/.DS_Store',
|
||||
];
|
||||
|
||||
|
||||
const watchers: any[] = [];
|
||||
type EventType = 'add' | 'change';
|
||||
|
||||
/**
 * Handles watcher update events and triggers provider index synchronization.
 *
 * Errors are logged and swallowed so a failed sync never crashes the watcher.
 *
 * @param eventType - Filesystem event that triggered the sync ('add' | 'change').
 * @param filePath - Path that changed; used for diagnostics logging only.
 * @param provider - Provider whose index should be refreshed.
 */
async function onUpdate(
  eventType: EventType,
  filePath: string,
  provider: LLMProvider,
): Promise<void> {
  try {
    // fullRescan: presumably because a single file event cannot be mapped to
    // one session reliably — confirm against the indexer implementations.
    const result = await llmSessionsService.synchronizeProvider(provider, { fullRescan: true });
    logger.info(`LLM watcher sync complete for provider "${provider}" after ${eventType}`, {
      filePath,
      processed: result.processed,
    });
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    logger.error(`LLM watcher sync failed for provider "${provider}"`, {
      eventType,
      filePath,
      error: message,
    });
  }
}
|
||||
|
||||
/**
 * Initializes LLM session watchers and performs an initial index sync.
 *
 * Runs a full sync first so the DB reflects current disk state, then attaches
 * one polling chokidar watcher per provider root. Setup failures are logged
 * per provider and never abort the remaining watchers.
 */
export async function initializeWatcher(): Promise<void> {
  logger.info('Setting up LLM session watchers...');

  const initialSync = await llmSessionsService.synchronizeSessions();
  logger.info('Initial LLM session sync complete.', {
    processedByProvider: initialSync.processedByProvider,
    failures: initialSync.failures,
  });

  for (const { provider, rootPath } of PROVIDER_WATCH_PATHS) {
    try {
      // chokidar v4 emits ENOENT via the "error" event for missing roots and will not auto-recover.
      // Ensure provider folders exist before creating the watcher so watching stays active.
      await fsPromises.mkdir(rootPath, { recursive: true });

      const watcher = chokidar.watch(rootPath, {
        ignored: WATCHER_IGNORED_PATTERNS,
        persistent: true,
        // Don't fire events for existing files on startup
        ignoreInitial: true,
        followSymlinks: false,
        // Reasonable depth limit
        depth: 6,
        // Use polling to fix Windows fs.watch buffering/batching issues.
        // It now stops relying on native filesystem events and checks for changes at intervals.
        usePolling: true,
        // Poll every 2000ms
        interval: 2_000,
        // Large binary files are more expensive to poll than text files.
        binaryInterval: 6_000,
        // Removed awaitWriteFinish to prevent delays when LLM streams to the file
      });

      watcher
        .on('add', (filePath: string) => {
          // Fire-and-forget: sync errors are handled inside onUpdate.
          void onUpdate('add', filePath, provider);
        })
        .on('change', (filePath: string) => {
          void onUpdate('change', filePath, provider);
        })
        .on('error', (error: unknown) => {
          const message = error instanceof Error ? error.message : String(error);
          logger.error(`LLM watcher error for provider "${provider}"`, {
            error: message,
          });
        });

      // Retain the watcher so it is not garbage collected.
      watchers.push(watcher);
    } catch (error) {
      const message = error instanceof Error ? error.message : String(error);
      logger.error(`Failed to initialize LLM watcher for provider "${provider}"`, {
        rootPath,
        error: message,
      });
    }
  }
}
|
||||
Reference in New Issue
Block a user