mirror of
https://github.com/siteboon/claudecodeui.git
synced 2026-05-16 09:13:36 +00:00
feat(backend): setup mcp, image upload, and skills
This commit is contained in:
85
server/src/modules/llm/assets.service.ts
Normal file
85
server/src/modules/llm/assets.service.ts
Normal file
@@ -0,0 +1,85 @@
|
||||
import { randomUUID } from 'node:crypto';
|
||||
import { mkdir, writeFile } from 'node:fs/promises';
|
||||
import path from 'node:path';
|
||||
|
||||
import { AppError } from '@/shared/utils/app-error.js';
|
||||
|
||||
const SUPPORTED_IMAGE_MIME_TYPES = new Set([
|
||||
'image/jpeg',
|
||||
'image/png',
|
||||
'image/gif',
|
||||
'image/webp',
|
||||
]);
|
||||
|
||||
const MIME_TO_EXTENSION: Record<string, string> = {
|
||||
'image/jpeg': '.jpg',
|
||||
'image/png': '.png',
|
||||
'image/gif': '.gif',
|
||||
'image/webp': '.webp',
|
||||
};
|
||||
|
||||
type UploadedImage = {
|
||||
originalname: string;
|
||||
mimetype: string;
|
||||
size: number;
|
||||
buffer: Buffer;
|
||||
};
|
||||
|
||||
export type StoredImageAsset = {
|
||||
originalName: string;
|
||||
storedName: string;
|
||||
absolutePath: string;
|
||||
relativePath: string;
|
||||
mimeType: string;
|
||||
size: number;
|
||||
};
|
||||
|
||||
/**
|
||||
* Persists uploaded images in `.cloudcli/assets` and returns resolved paths for provider calls.
|
||||
*/
|
||||
export const llmAssetsService = {
|
||||
async storeUploadedImages(
|
||||
images: UploadedImage[],
|
||||
options?: {
|
||||
workspacePath?: string;
|
||||
},
|
||||
): Promise<StoredImageAsset[]> {
|
||||
if (!images.length) {
|
||||
throw new AppError('At least one image file is required.', {
|
||||
code: 'IMAGE_REQUIRED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const workspaceRoot = path.resolve(options?.workspacePath ?? process.cwd());
|
||||
const assetsDirectory = path.join(workspaceRoot, '.cloudcli', 'assets');
|
||||
await mkdir(assetsDirectory, { recursive: true });
|
||||
|
||||
const storedAssets: StoredImageAsset[] = [];
|
||||
for (const image of images) {
|
||||
if (!SUPPORTED_IMAGE_MIME_TYPES.has(image.mimetype)) {
|
||||
throw new AppError(`Unsupported image type "${image.mimetype}".`, {
|
||||
code: 'UNSUPPORTED_IMAGE_TYPE',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const extension = (MIME_TO_EXTENSION[image.mimetype] ?? path.extname(image.originalname)) || '.img';
|
||||
const storedName = `${Date.now()}-${randomUUID()}${extension}`;
|
||||
const absolutePath = path.join(assetsDirectory, storedName);
|
||||
|
||||
await writeFile(absolutePath, image.buffer);
|
||||
|
||||
storedAssets.push({
|
||||
originalName: image.originalname,
|
||||
storedName,
|
||||
absolutePath,
|
||||
relativePath: path.relative(workspaceRoot, absolutePath).replace(/\\/g, '/'),
|
||||
mimeType: image.mimetype,
|
||||
size: image.size,
|
||||
});
|
||||
}
|
||||
|
||||
return storedAssets;
|
||||
},
|
||||
};
|
||||
211
server/src/modules/llm/llm-unifier.images.test.ts
Normal file
211
server/src/modules/llm/llm-unifier.images.test.ts
Normal file
@@ -0,0 +1,211 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import fs from 'node:fs/promises';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
import test from 'node:test';
|
||||
|
||||
import { AppError } from '../../shared/utils/app-error.js';
|
||||
import { llmAssetsService } from './assets.service.js';
|
||||
import { ClaudeProvider } from './providers/claude.provider.js';
|
||||
import { CodexProvider } from './providers/codex.provider.js';
|
||||
import { CursorProvider } from './providers/cursor.provider.js';
|
||||
import { GeminiProvider } from './providers/gemini.provider.js';
|
||||
import { llmService } from './llm.service.js';
|
||||
|
||||
const asyncEvents = async function* (events: unknown[]) {
|
||||
for (const event of events) {
|
||||
yield event;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* This test covers the universal image-upload flow: store uploads under `.cloudcli/assets`.
|
||||
*/
|
||||
test('llmAssetsService stores uploaded images in .cloudcli/assets', { concurrency: false }, async () => {
|
||||
const workspaceRoot = await fs.mkdtemp(path.join(os.tmpdir(), 'llm-assets-'));
|
||||
try {
|
||||
const images = await llmAssetsService.storeUploadedImages(
|
||||
[
|
||||
{
|
||||
originalname: 'photo.jpg',
|
||||
mimetype: 'image/jpeg',
|
||||
size: 3,
|
||||
buffer: Buffer.from([0x01, 0x02, 0x03]),
|
||||
},
|
||||
{
|
||||
originalname: 'diagram.png',
|
||||
mimetype: 'image/png',
|
||||
size: 4,
|
||||
buffer: Buffer.from([0x11, 0x12, 0x13, 0x14]),
|
||||
},
|
||||
],
|
||||
{ workspacePath: workspaceRoot },
|
||||
);
|
||||
|
||||
assert.equal(images.length, 2);
|
||||
assert.ok(images[0]?.relativePath.startsWith('.cloudcli/assets/'));
|
||||
assert.ok(images[1]?.relativePath.startsWith('.cloudcli/assets/'));
|
||||
await fs.access(images[0]!.absolutePath);
|
||||
await fs.access(images[1]!.absolutePath);
|
||||
} finally {
|
||||
await fs.rm(workspaceRoot, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* This test covers upload validation: unsupported mime types are rejected.
|
||||
*/
|
||||
test('llmAssetsService rejects unsupported image mime types', async () => {
|
||||
await assert.rejects(
|
||||
llmAssetsService.storeUploadedImages([
|
||||
{
|
||||
originalname: 'file.bmp',
|
||||
mimetype: 'image/bmp',
|
||||
size: 4,
|
||||
buffer: Buffer.from([0x10, 0x20, 0x30, 0x40]),
|
||||
},
|
||||
]),
|
||||
(error: unknown) =>
|
||||
error instanceof AppError &&
|
||||
error.code === 'UNSUPPORTED_IMAGE_TYPE' &&
|
||||
error.statusCode === 400,
|
||||
);
|
||||
});
|
||||
|
||||
/**
|
||||
* This test covers Claude image input support: prompt becomes async iterable with text + base64 image blocks.
|
||||
*/
|
||||
test('claude provider builds async prompt payload with base64 image blocks', { concurrency: false }, async () => {
|
||||
const workspaceRoot = await fs.mkdtemp(path.join(os.tmpdir(), 'llm-claude-img-'));
|
||||
const imagePath = path.join(workspaceRoot, 'sample.jpg');
|
||||
const imageBytes = Buffer.from([0xaa, 0xbb, 0xcc]);
|
||||
await fs.writeFile(imagePath, imageBytes);
|
||||
|
||||
try {
|
||||
const provider = new ClaudeProvider() as any;
|
||||
const promptPayload = await provider.buildPromptInput(
|
||||
'describe this',
|
||||
[imagePath],
|
||||
workspaceRoot,
|
||||
);
|
||||
|
||||
assert.equal(typeof promptPayload[Symbol.asyncIterator], 'function');
|
||||
const iterator = promptPayload[Symbol.asyncIterator]();
|
||||
const first = await iterator.next();
|
||||
assert.equal(first.done, false);
|
||||
|
||||
const message = first.value as {
|
||||
type: string;
|
||||
message: {
|
||||
role: string;
|
||||
content: Array<Record<string, unknown>>;
|
||||
};
|
||||
};
|
||||
|
||||
assert.equal(message.type, 'user');
|
||||
assert.equal(message.message.role, 'user');
|
||||
assert.equal(message.message.content[0]?.type, 'text');
|
||||
assert.equal(message.message.content[0]?.text, 'describe this');
|
||||
assert.equal(message.message.content[1]?.type, 'image');
|
||||
const imageBlock = message.message.content[1] as {
|
||||
source: {
|
||||
type: string;
|
||||
media_type: string;
|
||||
data: string;
|
||||
};
|
||||
};
|
||||
assert.equal(imageBlock.source.type, 'base64');
|
||||
assert.equal(imageBlock.source.media_type, 'image/jpeg');
|
||||
assert.equal(imageBlock.source.data, imageBytes.toString('base64'));
|
||||
} finally {
|
||||
await fs.rm(workspaceRoot, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* This test covers Codex image input support: runStreamed receives text + local_image items.
|
||||
*/
|
||||
test('codex provider sends local_image prompt items when image paths are provided', async () => {
|
||||
const provider = new CodexProvider() as any;
|
||||
let capturedPrompt: unknown;
|
||||
|
||||
provider.loadCodexSdkModule = async () => ({
|
||||
Codex: class {
|
||||
startThread() {
|
||||
return {
|
||||
async runStreamed(prompt: unknown) {
|
||||
capturedPrompt = prompt;
|
||||
return { events: asyncEvents([]) };
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
resumeThread() {
|
||||
return {
|
||||
async runStreamed(prompt: unknown) {
|
||||
capturedPrompt = prompt;
|
||||
return { events: asyncEvents([]) };
|
||||
},
|
||||
};
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
await provider.createSdkExecution({
|
||||
prompt: 'analyze this image',
|
||||
sessionId: 'codex-image-1',
|
||||
isResume: false,
|
||||
imagePaths: ['assets/a.png'],
|
||||
workspacePath: '/tmp/workspace',
|
||||
});
|
||||
|
||||
assert.ok(Array.isArray(capturedPrompt));
|
||||
const promptItems = capturedPrompt as Array<Record<string, unknown>>;
|
||||
assert.equal(promptItems[0]?.type, 'text');
|
||||
assert.equal(promptItems[0]?.text, 'analyze this image');
|
||||
assert.equal(promptItems[1]?.type, 'local_image');
|
||||
assert.equal(promptItems[1]?.path, path.resolve('/tmp/workspace', 'assets/a.png'));
|
||||
});
|
||||
|
||||
/**
|
||||
* This test covers Gemini/Cursor image handling: image paths are appended to the prompt payload.
|
||||
*/
|
||||
test('gemini and cursor providers append image path arrays to prompts', () => {
|
||||
const geminiProvider = new GeminiProvider() as any;
|
||||
const cursorProvider = new CursorProvider() as any;
|
||||
|
||||
const geminiInvocation = geminiProvider.createCliInvocation({
|
||||
prompt: 'summarize',
|
||||
sessionId: 'g-1',
|
||||
isResume: false,
|
||||
imagePaths: ['scripts/pic.jpg'],
|
||||
});
|
||||
|
||||
const cursorInvocation = cursorProvider.createCliInvocation({
|
||||
prompt: 'summarize',
|
||||
sessionId: 'c-1',
|
||||
isResume: false,
|
||||
imagePaths: ['scripts/pic.jpg'],
|
||||
});
|
||||
|
||||
const geminiPrompt = geminiInvocation.args[1];
|
||||
const cursorPrompt = cursorInvocation.args[cursorInvocation.args.length - 1];
|
||||
assert.ok(typeof geminiPrompt === 'string' && geminiPrompt.includes('["scripts/pic.jpg"]'));
|
||||
assert.ok(typeof cursorPrompt === 'string' && cursorPrompt.includes('["scripts/pic.jpg"]'));
|
||||
});
|
||||
|
||||
/**
|
||||
* This test covers API payload validation: imagePaths must be an array of strings.
|
||||
*/
|
||||
test('llmService rejects invalid imagePaths payloads before provider execution', async () => {
|
||||
await assert.rejects(
|
||||
llmService.startSession('cursor', {
|
||||
prompt: 'hello',
|
||||
imagePaths: [1, 2, 3],
|
||||
}),
|
||||
(error: unknown) =>
|
||||
error instanceof AppError &&
|
||||
error.code === 'INVALID_IMAGE_PATHS' &&
|
||||
error.statusCode === 400,
|
||||
);
|
||||
});
|
||||
351
server/src/modules/llm/llm-unifier.mcp.test.ts
Normal file
351
server/src/modules/llm/llm-unifier.mcp.test.ts
Normal file
@@ -0,0 +1,351 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import fs from 'node:fs/promises';
|
||||
import http from 'node:http';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
import test from 'node:test';
|
||||
|
||||
import TOML from '@iarna/toml';
|
||||
|
||||
import { AppError } from '../../shared/utils/app-error.js';
|
||||
import { llmMcpService } from './mcp.service.js';
|
||||
|
||||
const patchHomeDir = (nextHomeDir: string) => {
|
||||
const original = os.homedir;
|
||||
(os as any).homedir = () => nextHomeDir;
|
||||
return () => {
|
||||
(os as any).homedir = original;
|
||||
};
|
||||
};
|
||||
|
||||
const readJson = async (filePath: string): Promise<Record<string, unknown>> => {
|
||||
const content = await fs.readFile(filePath, 'utf8');
|
||||
return JSON.parse(content) as Record<string, unknown>;
|
||||
};
|
||||
|
||||
/**
|
||||
* This test covers Claude MCP support for all scopes (user/local/project) and all transports (stdio/http/sse),
|
||||
* including add, update/list, and remove operations.
|
||||
*/
|
||||
test('llmMcpService handles claude MCP scopes/transports with file-backed persistence', { concurrency: false }, async () => {
|
||||
const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), 'llm-mcp-claude-'));
|
||||
const workspacePath = path.join(tempRoot, 'workspace');
|
||||
await fs.mkdir(workspacePath, { recursive: true });
|
||||
|
||||
const restoreHomeDir = patchHomeDir(tempRoot);
|
||||
try {
|
||||
await llmMcpService.upsertProviderServer('claude', {
|
||||
name: 'claude-user-stdio',
|
||||
scope: 'user',
|
||||
transport: 'stdio',
|
||||
command: 'npx',
|
||||
args: ['-y', 'my-server'],
|
||||
env: { API_KEY: 'secret' },
|
||||
});
|
||||
|
||||
await llmMcpService.upsertProviderServer('claude', {
|
||||
name: 'claude-local-http',
|
||||
scope: 'local',
|
||||
transport: 'http',
|
||||
url: 'https://example.com/mcp',
|
||||
headers: { Authorization: 'Bearer token' },
|
||||
workspacePath,
|
||||
});
|
||||
|
||||
await llmMcpService.upsertProviderServer('claude', {
|
||||
name: 'claude-project-sse',
|
||||
scope: 'project',
|
||||
transport: 'sse',
|
||||
url: 'https://example.com/sse',
|
||||
headers: { 'X-API-Key': 'abc' },
|
||||
workspacePath,
|
||||
});
|
||||
|
||||
const grouped = await llmMcpService.listProviderServers('claude', { workspacePath });
|
||||
assert.ok(grouped.user.some((server) => server.name === 'claude-user-stdio' && server.transport === 'stdio'));
|
||||
assert.ok(grouped.local.some((server) => server.name === 'claude-local-http' && server.transport === 'http'));
|
||||
assert.ok(grouped.project.some((server) => server.name === 'claude-project-sse' && server.transport === 'sse'));
|
||||
|
||||
// update behavior is the same upsert route with same name
|
||||
await llmMcpService.upsertProviderServer('claude', {
|
||||
name: 'claude-project-sse',
|
||||
scope: 'project',
|
||||
transport: 'sse',
|
||||
url: 'https://example.com/sse-updated',
|
||||
headers: { 'X-API-Key': 'updated' },
|
||||
workspacePath,
|
||||
});
|
||||
|
||||
const projectConfig = await readJson(path.join(workspacePath, '.mcp.json'));
|
||||
const projectServers = projectConfig.mcpServers as Record<string, unknown>;
|
||||
const projectServer = projectServers['claude-project-sse'] as Record<string, unknown>;
|
||||
assert.equal(projectServer.url, 'https://example.com/sse-updated');
|
||||
|
||||
const removeResult = await llmMcpService.removeProviderServer('claude', {
|
||||
name: 'claude-local-http',
|
||||
scope: 'local',
|
||||
workspacePath,
|
||||
});
|
||||
assert.equal(removeResult.removed, true);
|
||||
} finally {
|
||||
restoreHomeDir();
|
||||
await fs.rm(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* This test covers Codex MCP support for user/project scopes, stdio/http formats,
|
||||
* and validation for unsupported scope/transport combinations.
|
||||
*/
|
||||
test('llmMcpService handles codex MCP TOML config and capability validation', { concurrency: false }, async () => {
|
||||
const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), 'llm-mcp-codex-'));
|
||||
const workspacePath = path.join(tempRoot, 'workspace');
|
||||
await fs.mkdir(workspacePath, { recursive: true });
|
||||
|
||||
const restoreHomeDir = patchHomeDir(tempRoot);
|
||||
try {
|
||||
await llmMcpService.upsertProviderServer('codex', {
|
||||
name: 'codex-user-stdio',
|
||||
scope: 'user',
|
||||
transport: 'stdio',
|
||||
command: 'python',
|
||||
args: ['server.py'],
|
||||
env: { API_KEY: 'x' },
|
||||
envVars: ['API_KEY'],
|
||||
cwd: '/tmp',
|
||||
});
|
||||
|
||||
await llmMcpService.upsertProviderServer('codex', {
|
||||
name: 'codex-project-http',
|
||||
scope: 'project',
|
||||
transport: 'http',
|
||||
url: 'https://codex.example.com/mcp',
|
||||
headers: { 'X-Custom-Header': 'value' },
|
||||
envHttpHeaders: { 'X-API-Key': 'MY_API_KEY_ENV' },
|
||||
bearerTokenEnvVar: 'MY_API_TOKEN',
|
||||
workspacePath,
|
||||
});
|
||||
|
||||
const userTomlPath = path.join(tempRoot, '.codex', 'config.toml');
|
||||
const userConfig = TOML.parse(await fs.readFile(userTomlPath, 'utf8')) as Record<string, unknown>;
|
||||
const userServers = userConfig.mcp_servers as Record<string, unknown>;
|
||||
const userStdio = userServers['codex-user-stdio'] as Record<string, unknown>;
|
||||
assert.equal(userStdio.command, 'python');
|
||||
|
||||
const projectTomlPath = path.join(workspacePath, '.codex', 'config.toml');
|
||||
const projectConfig = TOML.parse(await fs.readFile(projectTomlPath, 'utf8')) as Record<string, unknown>;
|
||||
const projectServers = projectConfig.mcp_servers as Record<string, unknown>;
|
||||
const projectHttp = projectServers['codex-project-http'] as Record<string, unknown>;
|
||||
assert.equal(projectHttp.url, 'https://codex.example.com/mcp');
|
||||
|
||||
await assert.rejects(
|
||||
llmMcpService.upsertProviderServer('codex', {
|
||||
name: 'codex-local',
|
||||
scope: 'local',
|
||||
transport: 'stdio',
|
||||
command: 'node',
|
||||
}),
|
||||
(error: unknown) =>
|
||||
error instanceof AppError &&
|
||||
error.code === 'MCP_SCOPE_NOT_SUPPORTED' &&
|
||||
error.statusCode === 400,
|
||||
);
|
||||
|
||||
await assert.rejects(
|
||||
llmMcpService.upsertProviderServer('codex', {
|
||||
name: 'codex-sse',
|
||||
scope: 'project',
|
||||
transport: 'sse',
|
||||
url: 'https://example.com/sse',
|
||||
workspacePath,
|
||||
}),
|
||||
(error: unknown) =>
|
||||
error instanceof AppError &&
|
||||
error.code === 'MCP_TRANSPORT_NOT_SUPPORTED' &&
|
||||
error.statusCode === 400,
|
||||
);
|
||||
} finally {
|
||||
restoreHomeDir();
|
||||
await fs.rm(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* This test covers Gemini/Cursor MCP JSON formats and user/project scope persistence.
|
||||
*/
|
||||
test('llmMcpService handles gemini and cursor MCP JSON config formats', { concurrency: false }, async () => {
|
||||
const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), 'llm-mcp-gc-'));
|
||||
const workspacePath = path.join(tempRoot, 'workspace');
|
||||
await fs.mkdir(workspacePath, { recursive: true });
|
||||
|
||||
const restoreHomeDir = patchHomeDir(tempRoot);
|
||||
try {
|
||||
await llmMcpService.upsertProviderServer('gemini', {
|
||||
name: 'gemini-stdio',
|
||||
scope: 'user',
|
||||
transport: 'stdio',
|
||||
command: 'node',
|
||||
args: ['server.js'],
|
||||
env: { TOKEN: '$TOKEN' },
|
||||
cwd: './server',
|
||||
});
|
||||
|
||||
await llmMcpService.upsertProviderServer('gemini', {
|
||||
name: 'gemini-http',
|
||||
scope: 'project',
|
||||
transport: 'http',
|
||||
url: 'https://gemini.example.com/mcp',
|
||||
headers: { Authorization: 'Bearer token' },
|
||||
workspacePath,
|
||||
});
|
||||
|
||||
await llmMcpService.upsertProviderServer('cursor', {
|
||||
name: 'cursor-stdio',
|
||||
scope: 'project',
|
||||
transport: 'stdio',
|
||||
command: 'npx',
|
||||
args: ['-y', 'mcp-server'],
|
||||
env: { API_KEY: 'value' },
|
||||
workspacePath,
|
||||
});
|
||||
|
||||
await llmMcpService.upsertProviderServer('cursor', {
|
||||
name: 'cursor-http',
|
||||
scope: 'user',
|
||||
transport: 'http',
|
||||
url: 'http://localhost:3333/mcp',
|
||||
headers: { API_KEY: 'value' },
|
||||
});
|
||||
|
||||
const geminiUserConfig = await readJson(path.join(tempRoot, '.gemini', 'settings.json'));
|
||||
const geminiUserServer = (geminiUserConfig.mcpServers as Record<string, unknown>)['gemini-stdio'] as Record<string, unknown>;
|
||||
assert.equal(geminiUserServer.command, 'node');
|
||||
assert.equal(geminiUserServer.type, undefined);
|
||||
|
||||
const geminiProjectConfig = await readJson(path.join(workspacePath, '.gemini', 'settings.json'));
|
||||
const geminiProjectServer = (geminiProjectConfig.mcpServers as Record<string, unknown>)['gemini-http'] as Record<string, unknown>;
|
||||
assert.equal(geminiProjectServer.type, 'http');
|
||||
|
||||
const cursorUserConfig = await readJson(path.join(tempRoot, '.cursor', 'mcp.json'));
|
||||
const cursorHttpServer = (cursorUserConfig.mcpServers as Record<string, unknown>)['cursor-http'] as Record<string, unknown>;
|
||||
assert.equal(cursorHttpServer.url, 'http://localhost:3333/mcp');
|
||||
assert.equal(cursorHttpServer.type, undefined);
|
||||
} finally {
|
||||
restoreHomeDir();
|
||||
await fs.rm(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* This test covers the global MCP adder requirement: only http/stdio are allowed and
|
||||
* one payload is written to all providers.
|
||||
*/
|
||||
test('llmMcpService global adder writes to all providers and rejects unsupported transports', { concurrency: false }, async () => {
|
||||
const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), 'llm-mcp-global-'));
|
||||
const workspacePath = path.join(tempRoot, 'workspace');
|
||||
await fs.mkdir(workspacePath, { recursive: true });
|
||||
|
||||
const restoreHomeDir = patchHomeDir(tempRoot);
|
||||
try {
|
||||
const globalResult = await llmMcpService.addServerToAllProviders({
|
||||
name: 'global-http',
|
||||
scope: 'project',
|
||||
transport: 'http',
|
||||
url: 'https://global.example.com/mcp',
|
||||
workspacePath,
|
||||
});
|
||||
|
||||
assert.equal(globalResult.length, 4);
|
||||
assert.ok(globalResult.every((entry) => entry.created === true));
|
||||
|
||||
const claudeProject = await readJson(path.join(workspacePath, '.mcp.json'));
|
||||
assert.ok((claudeProject.mcpServers as Record<string, unknown>)['global-http']);
|
||||
|
||||
const codexProject = TOML.parse(await fs.readFile(path.join(workspacePath, '.codex', 'config.toml'), 'utf8')) as Record<string, unknown>;
|
||||
assert.ok((codexProject.mcp_servers as Record<string, unknown>)['global-http']);
|
||||
|
||||
const geminiProject = await readJson(path.join(workspacePath, '.gemini', 'settings.json'));
|
||||
assert.ok((geminiProject.mcpServers as Record<string, unknown>)['global-http']);
|
||||
|
||||
const cursorProject = await readJson(path.join(workspacePath, '.cursor', 'mcp.json'));
|
||||
assert.ok((cursorProject.mcpServers as Record<string, unknown>)['global-http']);
|
||||
|
||||
await assert.rejects(
|
||||
llmMcpService.addServerToAllProviders({
|
||||
name: 'global-sse',
|
||||
scope: 'project',
|
||||
transport: 'sse',
|
||||
url: 'https://example.com/sse',
|
||||
workspacePath,
|
||||
}),
|
||||
(error: unknown) =>
|
||||
error instanceof AppError &&
|
||||
error.code === 'INVALID_GLOBAL_MCP_TRANSPORT' &&
|
||||
error.statusCode === 400,
|
||||
);
|
||||
} finally {
|
||||
restoreHomeDir();
|
||||
await fs.rm(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* This test covers "run" behavior for both stdio and http MCP servers.
|
||||
*/
|
||||
test('llmMcpService runProviderServer probes stdio and http MCP servers', { concurrency: false }, async () => {
|
||||
const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), 'llm-mcp-run-'));
|
||||
const workspacePath = path.join(tempRoot, 'workspace');
|
||||
await fs.mkdir(workspacePath, { recursive: true });
|
||||
|
||||
const restoreHomeDir = patchHomeDir(tempRoot);
|
||||
const server = http.createServer((_req, res) => {
|
||||
res.statusCode = 200;
|
||||
res.end('ok');
|
||||
});
|
||||
|
||||
try {
|
||||
await new Promise<void>((resolve) => server.listen(0, '127.0.0.1', () => resolve()));
|
||||
const address = server.address();
|
||||
assert.ok(address && typeof address === 'object');
|
||||
const url = `http://127.0.0.1:${address.port}/mcp`;
|
||||
|
||||
await llmMcpService.upsertProviderServer('gemini', {
|
||||
name: 'probe-http',
|
||||
scope: 'project',
|
||||
transport: 'http',
|
||||
url,
|
||||
workspacePath,
|
||||
});
|
||||
|
||||
await llmMcpService.upsertProviderServer('cursor', {
|
||||
name: 'probe-stdio',
|
||||
scope: 'project',
|
||||
transport: 'stdio',
|
||||
command: process.execPath,
|
||||
args: ['-e', 'process.exit(0)'],
|
||||
workspacePath,
|
||||
});
|
||||
|
||||
const httpProbe = await llmMcpService.runProviderServer({
|
||||
provider: 'gemini',
|
||||
name: 'probe-http',
|
||||
scope: 'project',
|
||||
workspacePath,
|
||||
});
|
||||
assert.equal(httpProbe.reachable, true);
|
||||
assert.equal(httpProbe.transport, 'http');
|
||||
|
||||
const stdioProbe = await llmMcpService.runProviderServer({
|
||||
provider: 'cursor',
|
||||
name: 'probe-stdio',
|
||||
scope: 'project',
|
||||
workspacePath,
|
||||
});
|
||||
assert.equal(stdioProbe.reachable, true);
|
||||
assert.equal(stdioProbe.transport, 'stdio');
|
||||
} finally {
|
||||
server.close();
|
||||
restoreHomeDir();
|
||||
await fs.rm(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
207
server/src/modules/llm/llm-unifier.skills.test.ts
Normal file
207
server/src/modules/llm/llm-unifier.skills.test.ts
Normal file
@@ -0,0 +1,207 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import fs from 'node:fs/promises';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
import test from 'node:test';
|
||||
|
||||
import { llmSkillsService } from './skills.service.js';
|
||||
|
||||
const patchHomeDir = (nextHomeDir: string) => {
|
||||
const original = os.homedir;
|
||||
(os as any).homedir = () => nextHomeDir;
|
||||
return () => {
|
||||
(os as any).homedir = original;
|
||||
};
|
||||
};
|
||||
|
||||
const createSkill = async (
|
||||
rootSkillsDirectory: string,
|
||||
directoryName: string,
|
||||
metadata: {
|
||||
name: string;
|
||||
description: string;
|
||||
},
|
||||
) => {
|
||||
const skillDirectory = path.join(rootSkillsDirectory, directoryName);
|
||||
await fs.mkdir(skillDirectory, { recursive: true });
|
||||
await fs.writeFile(
|
||||
path.join(skillDirectory, 'SKILL.md'),
|
||||
`---\nname: ${metadata.name}\ndescription: ${metadata.description}\n---\n\n# ${metadata.name}\n`,
|
||||
'utf8',
|
||||
);
|
||||
};
|
||||
|
||||
/**
|
||||
* This test covers Claude skills fetching from user/project/plugin locations and plugin namespace invocation.
|
||||
*/
|
||||
test('llmSkillsService lists claude user/project/plugin skills with proper invocation names', { concurrency: false }, async () => {
|
||||
const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), 'llm-skills-claude-'));
|
||||
const workspacePath = path.join(tempRoot, 'workspace');
|
||||
const pluginInstallPath = path.join(tempRoot, 'plugin-install');
|
||||
await fs.mkdir(workspacePath, { recursive: true });
|
||||
|
||||
const restoreHomeDir = patchHomeDir(tempRoot);
|
||||
try {
|
||||
await createSkill(path.join(tempRoot, '.claude', 'skills'), 'user-helper', {
|
||||
name: 'user-helper',
|
||||
description: 'User skill description',
|
||||
});
|
||||
await createSkill(path.join(workspacePath, '.claude', 'skills'), 'project-helper', {
|
||||
name: 'project-helper',
|
||||
description: 'Project skill description',
|
||||
});
|
||||
await createSkill(path.join(pluginInstallPath, 'skills'), 'plugin-helper', {
|
||||
name: 'plugin-helper',
|
||||
description: 'Plugin skill description',
|
||||
});
|
||||
|
||||
await fs.mkdir(path.join(tempRoot, '.claude', 'plugins'), { recursive: true });
|
||||
await fs.writeFile(
|
||||
path.join(tempRoot, '.claude', 'settings.json'),
|
||||
JSON.stringify({
|
||||
enabledPlugins: {
|
||||
'example-skills@anthropic-agent-skills': true,
|
||||
},
|
||||
}),
|
||||
'utf8',
|
||||
);
|
||||
await fs.writeFile(
|
||||
path.join(tempRoot, '.claude', 'plugins', 'installed_plugins.json'),
|
||||
JSON.stringify({
|
||||
version: 2,
|
||||
plugins: {
|
||||
'example-skills@anthropic-agent-skills': [
|
||||
{
|
||||
installPath: pluginInstallPath,
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
'utf8',
|
||||
);
|
||||
|
||||
const skills = await llmSkillsService.listProviderSkills('claude', { workspacePath });
|
||||
assert.ok(skills.some((skill) => skill.scope === 'user' && skill.invocation === '/user-helper'));
|
||||
assert.ok(skills.some((skill) => skill.scope === 'project' && skill.invocation === '/project-helper'));
|
||||
assert.ok(skills.some((skill) => skill.scope === 'plugin' && skill.invocation === '/example-skills:plugin-helper'));
|
||||
} finally {
|
||||
restoreHomeDir();
|
||||
await fs.rm(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* This test covers Codex skills discovery across repo/user/system locations and `$` invocation prefix.
|
||||
*/
|
||||
test('llmSkillsService lists codex skills from repo/user/system locations with dollar invocation', { concurrency: false }, async () => {
|
||||
const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), 'llm-skills-codex-'));
|
||||
const repoRoot = path.join(tempRoot, 'repo');
|
||||
const workspacePath = path.join(repoRoot, 'packages', 'app');
|
||||
await fs.mkdir(workspacePath, { recursive: true });
|
||||
await fs.mkdir(path.join(repoRoot, '.git'), { recursive: true });
|
||||
|
||||
const restoreHomeDir = patchHomeDir(tempRoot);
|
||||
try {
|
||||
await createSkill(path.join(workspacePath, '.agents', 'skills'), 'cwd-skill', {
|
||||
name: 'cwd-skill',
|
||||
description: 'cwd skill',
|
||||
});
|
||||
await createSkill(path.join(workspacePath, '..', '.agents', 'skills'), 'parent-skill', {
|
||||
name: 'parent-skill',
|
||||
description: 'parent skill',
|
||||
});
|
||||
await createSkill(path.join(repoRoot, '.agents', 'skills'), 'repo-root-skill', {
|
||||
name: 'repo-root-skill',
|
||||
description: 'repo root skill',
|
||||
});
|
||||
await createSkill(path.join(tempRoot, '.agents', 'skills'), 'user-skill', {
|
||||
name: 'user-skill',
|
||||
description: 'user skill',
|
||||
});
|
||||
await createSkill(path.join(tempRoot, '.codex', 'skills', '.system'), 'system-skill', {
|
||||
name: 'system-skill',
|
||||
description: 'system skill',
|
||||
});
|
||||
|
||||
const skills = await llmSkillsService.listProviderSkills('codex', { workspacePath });
|
||||
assert.ok(skills.some((skill) => skill.name === 'cwd-skill' && skill.invocation === '$cwd-skill'));
|
||||
assert.ok(skills.some((skill) => skill.name === 'parent-skill' && skill.invocation === '$parent-skill'));
|
||||
assert.ok(skills.some((skill) => skill.name === 'repo-root-skill' && skill.invocation === '$repo-root-skill'));
|
||||
assert.ok(skills.some((skill) => skill.name === 'user-skill' && skill.invocation === '$user-skill'));
|
||||
assert.ok(skills.some((skill) => skill.name === 'system-skill' && skill.invocation === '$system-skill'));
|
||||
} finally {
|
||||
restoreHomeDir();
|
||||
await fs.rm(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
/**
 * This test covers Gemini skill fetch locations and slash-based invocation format.
 */
test('llmSkillsService lists gemini skills from documented directories', { concurrency: false }, async () => {
  // Isolated temp root also acts as the fake home directory via patchHomeDir.
  const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), 'llm-skills-gemini-'));
  const workspacePath = path.join(tempRoot, 'workspace');
  await fs.mkdir(workspacePath, { recursive: true });

  const restoreHomeDir = patchHomeDir(tempRoot);
  try {
    // Home-level lookup locations: ~/.gemini/skills and ~/.agents/skills.
    await createSkill(path.join(tempRoot, '.gemini', 'skills'), 'home-gemini', {
      name: 'home-gemini',
      description: 'home gemini skill',
    });
    await createSkill(path.join(tempRoot, '.agents', 'skills'), 'home-agents', {
      name: 'home-agents',
      description: 'home agents skill',
    });
    // Workspace-level lookup locations: <ws>/.gemini/skills and <ws>/.agents/skills.
    await createSkill(path.join(workspacePath, '.gemini', 'skills'), 'project-gemini', {
      name: 'project-gemini',
      description: 'project gemini skill',
    });
    await createSkill(path.join(workspacePath, '.agents', 'skills'), 'project-agents', {
      name: 'project-agents',
      description: 'project agents skill',
    });

    const skills = await llmSkillsService.listProviderSkills('gemini', { workspacePath });
    // Gemini skills are surfaced with a slash-prefixed invocation string.
    assert.ok(skills.some((skill) => skill.invocation === '/home-gemini'));
    assert.ok(skills.some((skill) => skill.invocation === '/home-agents'));
    assert.ok(skills.some((skill) => skill.invocation === '/project-gemini'));
    assert.ok(skills.some((skill) => skill.invocation === '/project-agents'));
  } finally {
    // Restore os.homedir and delete the temp tree even when assertions fail.
    restoreHomeDir();
    await fs.rm(tempRoot, { recursive: true, force: true });
  }
});
|
||||
|
||||
/**
 * This test covers Cursor skill fetch locations and slash-based invocation format.
 */
test('llmSkillsService lists cursor skills from documented directories', { concurrency: false }, async () => {
  // Isolated temp root also acts as the fake home directory via patchHomeDir.
  const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), 'llm-skills-cursor-'));
  const workspacePath = path.join(tempRoot, 'workspace');
  await fs.mkdir(workspacePath, { recursive: true });

  const restoreHomeDir = patchHomeDir(tempRoot);
  try {
    // Workspace-level lookup locations: <ws>/.agents/skills and <ws>/.cursor/skills.
    await createSkill(path.join(workspacePath, '.agents', 'skills'), 'project-agents', {
      name: 'project-agents',
      description: 'project agents skill',
    });
    await createSkill(path.join(workspacePath, '.cursor', 'skills'), 'project-cursor', {
      name: 'project-cursor',
      description: 'project cursor skill',
    });
    // Home-level lookup location: ~/.cursor/skills.
    await createSkill(path.join(tempRoot, '.cursor', 'skills'), 'user-cursor', {
      name: 'user-cursor',
      description: 'user cursor skill',
    });

    const skills = await llmSkillsService.listProviderSkills('cursor', { workspacePath });
    // Cursor skills are surfaced with a slash-prefixed invocation string.
    assert.ok(skills.some((skill) => skill.invocation === '/project-agents'));
    assert.ok(skills.some((skill) => skill.invocation === '/project-cursor'));
    assert.ok(skills.some((skill) => skill.invocation === '/user-cursor'));
  } finally {
    // Restore os.homedir and delete the temp tree even when assertions fail.
    restoreHomeDir();
    await fs.rm(tempRoot, { recursive: true, force: true });
  }
});
|
||||
@@ -1,13 +1,27 @@
|
||||
import express, { type NextFunction, type Request, type Response } from 'express';
|
||||
import multer from 'multer';
|
||||
import path from 'node:path';
|
||||
|
||||
import { asyncHandler } from '@/shared/http/async-handler.js';
|
||||
import { AppError } from '@/shared/utils/app-error.js';
|
||||
import { createApiErrorResponse, createApiSuccessResponse } from '@/shared/http/api-response.js';
|
||||
import { llmService } from '@/modules/llm/llm.service.js';
|
||||
import { llmSessionsService } from '@/modules/llm/sessions.service.js';
|
||||
import { llmAssetsService } from '@/modules/llm/assets.service.js';
|
||||
import type { McpScope, McpTransport, UpsertMcpServerInput } from '@/modules/llm/mcp.service.js';
|
||||
import { llmMcpService } from '@/modules/llm/mcp.service.js';
|
||||
import { llmSkillsService } from '@/modules/llm/skills.service.js';
|
||||
import type { LLMProvider } from '@/shared/types/app.js';
|
||||
import { logger } from '@/shared/utils/logger.js';
|
||||
|
||||
const router = express.Router();
|
||||
// Multipart upload middleware: files are buffered in memory (no temp files on
// disk) and handed to llmAssetsService for persistence.
// Limits: at most 10 files per request, 20 MiB per file.
const upload = multer({
  storage: multer.memoryStorage(),
  limits: {
    files: 10,
    fileSize: 20 * 1024 * 1024,
  },
});
|
||||
|
||||
/**
|
||||
* Safely reads an Express path parameter that may arrive as string or string[].
|
||||
@@ -68,6 +82,139 @@ const parseRenamePayload = (payload: unknown): { summary: string } => {
|
||||
return { summary };
|
||||
};
|
||||
|
||||
/**
|
||||
* Reads optional query values and trims surrounding whitespace.
|
||||
*/
|
||||
const readOptionalQueryString = (value: unknown): string | undefined => {
|
||||
if (typeof value !== 'string') {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const normalized = value.trim();
|
||||
return normalized.length > 0 ? normalized : undefined;
|
||||
};
|
||||
|
||||
/**
|
||||
* Validates MCP scope query/body values.
|
||||
*/
|
||||
const parseMcpScope = (value: unknown): McpScope | undefined => {
|
||||
if (value === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const normalized = readOptionalQueryString(value);
|
||||
if (!normalized) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
if (normalized === 'user' || normalized === 'local' || normalized === 'project') {
|
||||
return normalized;
|
||||
}
|
||||
|
||||
throw new AppError(`Unsupported MCP scope "${normalized}".`, {
|
||||
code: 'INVALID_MCP_SCOPE',
|
||||
statusCode: 400,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Validates MCP transport query/body values.
|
||||
*/
|
||||
const parseMcpTransport = (value: unknown): McpTransport => {
|
||||
const normalized = readOptionalQueryString(value);
|
||||
if (!normalized) {
|
||||
throw new AppError('transport is required.', {
|
||||
code: 'MCP_TRANSPORT_REQUIRED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
if (normalized === 'stdio' || normalized === 'http' || normalized === 'sse') {
|
||||
return normalized;
|
||||
}
|
||||
|
||||
throw new AppError(`Unsupported MCP transport "${normalized}".`, {
|
||||
code: 'INVALID_MCP_TRANSPORT',
|
||||
statusCode: 400,
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Parses and validates MCP upsert payload.
|
||||
*/
|
||||
const parseMcpUpsertPayload = (payload: unknown): UpsertMcpServerInput => {
|
||||
if (!payload || typeof payload !== 'object') {
|
||||
throw new AppError('Request body must be an object.', {
|
||||
code: 'INVALID_REQUEST_BODY',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const body = payload as Record<string, unknown>;
|
||||
const name = readOptionalQueryString(body.name);
|
||||
if (!name) {
|
||||
throw new AppError('name is required.', {
|
||||
code: 'MCP_NAME_REQUIRED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const transport = parseMcpTransport(body.transport);
|
||||
const scope = parseMcpScope(body.scope);
|
||||
const workspacePath = readOptionalQueryString(body.workspacePath);
|
||||
|
||||
return {
|
||||
name,
|
||||
transport,
|
||||
scope,
|
||||
workspacePath,
|
||||
command: readOptionalQueryString(body.command),
|
||||
args: Array.isArray(body.args) ? body.args.filter((entry): entry is string => typeof entry === 'string') : undefined,
|
||||
env: typeof body.env === 'object' && body.env !== null
|
||||
? Object.fromEntries(
|
||||
Object.entries(body.env as Record<string, unknown>).filter(
|
||||
(entry): entry is [string, string] => typeof entry[1] === 'string',
|
||||
),
|
||||
)
|
||||
: undefined,
|
||||
cwd: readOptionalQueryString(body.cwd),
|
||||
url: readOptionalQueryString(body.url),
|
||||
headers: typeof body.headers === 'object' && body.headers !== null
|
||||
? Object.fromEntries(
|
||||
Object.entries(body.headers as Record<string, unknown>).filter(
|
||||
(entry): entry is [string, string] => typeof entry[1] === 'string',
|
||||
),
|
||||
)
|
||||
: undefined,
|
||||
envVars: Array.isArray(body.envVars)
|
||||
? body.envVars.filter((entry): entry is string => typeof entry === 'string')
|
||||
: undefined,
|
||||
bearerTokenEnvVar: readOptionalQueryString(body.bearerTokenEnvVar),
|
||||
envHttpHeaders: typeof body.envHttpHeaders === 'object' && body.envHttpHeaders !== null
|
||||
? Object.fromEntries(
|
||||
Object.entries(body.envHttpHeaders as Record<string, unknown>).filter(
|
||||
(entry): entry is [string, string] => typeof entry[1] === 'string',
|
||||
),
|
||||
)
|
||||
: undefined,
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Converts any provider route parameter into the strongly typed provider union.
|
||||
*/
|
||||
const parseProvider = (value: unknown): LLMProvider => {
|
||||
const normalized = normalizeProviderParam(value);
|
||||
if (normalized === 'claude' || normalized === 'codex' || normalized === 'cursor' || normalized === 'gemini') {
|
||||
return normalized;
|
||||
}
|
||||
|
||||
throw new AppError(`Unsupported provider "${normalized}".`, {
|
||||
code: 'UNSUPPORTED_PROVIDER',
|
||||
statusCode: 400,
|
||||
});
|
||||
};
|
||||
|
||||
router.get(
|
||||
'/providers',
|
||||
asyncHandler(async (_req: Request, res: Response) => {
|
||||
@@ -78,7 +225,7 @@ router.get(
|
||||
router.get(
|
||||
'/providers/:provider/models',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const provider = normalizeProviderParam(req.params.provider);
|
||||
const provider = parseProvider(req.params.provider);
|
||||
const models = await llmService.listModels(provider);
|
||||
res.json(createApiSuccessResponse({ provider, models }));
|
||||
}),
|
||||
@@ -87,7 +234,7 @@ router.get(
|
||||
router.get(
|
||||
'/providers/:provider/sessions',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const provider = normalizeProviderParam(req.params.provider);
|
||||
const provider = parseProvider(req.params.provider);
|
||||
const sessions = llmService.listSessions(provider);
|
||||
res.json(createApiSuccessResponse({ provider, sessions }));
|
||||
}),
|
||||
@@ -96,7 +243,7 @@ router.get(
|
||||
router.get(
|
||||
'/providers/:provider/sessions/:sessionId',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const provider = normalizeProviderParam(req.params.provider);
|
||||
const provider = parseProvider(req.params.provider);
|
||||
const sessionId = readPathParam(req.params.sessionId, 'sessionId');
|
||||
const session = llmService.getSession(provider, sessionId);
|
||||
if (!session) {
|
||||
@@ -113,7 +260,7 @@ router.get(
|
||||
router.post(
|
||||
'/providers/:provider/sessions/start',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const provider = normalizeProviderParam(req.params.provider);
|
||||
const provider = parseProvider(req.params.provider);
|
||||
const snapshot = await llmService.startSession(provider, req.body);
|
||||
|
||||
const waitForCompletion = parseWaitForCompletion(req);
|
||||
@@ -135,7 +282,7 @@ router.post(
|
||||
router.post(
|
||||
'/providers/:provider/sessions/:sessionId/resume',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const provider = normalizeProviderParam(req.params.provider);
|
||||
const provider = parseProvider(req.params.provider);
|
||||
const sessionId = readPathParam(req.params.sessionId, 'sessionId');
|
||||
|
||||
const snapshot = await llmService.resumeSession(provider, sessionId, req.body);
|
||||
@@ -154,7 +301,7 @@ router.post(
|
||||
router.post(
|
||||
'/providers/:provider/sessions/:sessionId/stop',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const provider = normalizeProviderParam(req.params.provider);
|
||||
const provider = parseProvider(req.params.provider);
|
||||
const sessionId = readPathParam(req.params.sessionId, 'sessionId');
|
||||
const stopped = await llmService.stopSession(provider, sessionId);
|
||||
res.json(createApiSuccessResponse({ provider, sessionId, stopped }));
|
||||
@@ -164,7 +311,7 @@ router.post(
|
||||
router.patch(
|
||||
'/providers/:provider/sessions/:sessionId/model',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const provider = normalizeProviderParam(req.params.provider);
|
||||
const provider = parseProvider(req.params.provider);
|
||||
const sessionId = readPathParam(req.params.sessionId, 'sessionId');
|
||||
const model = typeof req.body?.model === 'string' ? req.body.model.trim() : '';
|
||||
if (!model) {
|
||||
@@ -188,7 +335,7 @@ router.patch(
|
||||
router.patch(
|
||||
'/providers/:provider/sessions/:sessionId/thinking',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const provider = normalizeProviderParam(req.params.provider);
|
||||
const provider = parseProvider(req.params.provider);
|
||||
const sessionId = readPathParam(req.params.sessionId, 'sessionId');
|
||||
const thinkingMode =
|
||||
typeof req.body?.thinkingMode === 'string' ? req.body.thinkingMode.trim() : '';
|
||||
@@ -211,6 +358,180 @@ router.patch(
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* Uploads one or more images into `.cloudcli/assets` so providers can reuse file paths.
|
||||
*/
|
||||
router.post(
|
||||
'/assets/images',
|
||||
upload.array('images', 10),
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const workspacePath = readOptionalQueryString((req.body as Record<string, unknown> | undefined)?.workspacePath);
|
||||
const filesValue = (req as Request & { files?: unknown }).files;
|
||||
const files = Array.isArray(filesValue) ? filesValue as Array<{
|
||||
originalname: string;
|
||||
mimetype: string;
|
||||
size: number;
|
||||
buffer: Buffer;
|
||||
}> : [];
|
||||
const images = await llmAssetsService.storeUploadedImages(files, { workspacePath });
|
||||
res.status(201).json(createApiSuccessResponse({ images }));
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* Lists MCP servers for one provider grouped by user/local/project scopes.
|
||||
*/
|
||||
router.get(
|
||||
'/providers/:provider/mcp/servers',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const provider = parseProvider(req.params.provider);
|
||||
const workspacePath = readOptionalQueryString(req.query.workspacePath);
|
||||
const scope = parseMcpScope(req.query.scope);
|
||||
|
||||
if (scope) {
|
||||
const servers = await llmMcpService.listProviderServersForScope(
|
||||
provider,
|
||||
scope,
|
||||
path.resolve(workspacePath ?? process.cwd()),
|
||||
);
|
||||
res.json(createApiSuccessResponse({ provider, scope, servers }));
|
||||
return;
|
||||
}
|
||||
|
||||
const groupedServers = await llmMcpService.listProviderServers(provider, { workspacePath });
|
||||
res.json(createApiSuccessResponse({ provider, scopes: groupedServers }));
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* Adds one MCP server for one provider and scope.
|
||||
*/
|
||||
router.post(
|
||||
'/providers/:provider/mcp/servers',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const provider = parseProvider(req.params.provider);
|
||||
const payload = parseMcpUpsertPayload(req.body);
|
||||
const server = await llmMcpService.upsertProviderServer(provider, payload);
|
||||
res.status(201).json(createApiSuccessResponse({ server }));
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* Updates one provider MCP server definition.
|
||||
*/
|
||||
router.put(
|
||||
'/providers/:provider/mcp/servers/:name',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const provider = parseProvider(req.params.provider);
|
||||
const payload = parseMcpUpsertPayload({
|
||||
...((req.body && typeof req.body === 'object') ? req.body as Record<string, unknown> : {}),
|
||||
name: readPathParam(req.params.name, 'name'),
|
||||
});
|
||||
const server = await llmMcpService.upsertProviderServer(provider, payload);
|
||||
res.json(createApiSuccessResponse({ server }));
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* Removes one provider MCP server from its configured scope.
|
||||
*/
|
||||
router.delete(
|
||||
'/providers/:provider/mcp/servers/:name',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const provider = parseProvider(req.params.provider);
|
||||
const scope = parseMcpScope(req.query.scope);
|
||||
const workspacePath = readOptionalQueryString(req.query.workspacePath);
|
||||
const result = await llmMcpService.removeProviderServer(provider, {
|
||||
name: readPathParam(req.params.name, 'name'),
|
||||
scope,
|
||||
workspacePath,
|
||||
});
|
||||
res.json(createApiSuccessResponse(result));
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* Executes a lightweight startup/connectivity probe for one provider MCP server.
|
||||
*/
|
||||
router.post(
|
||||
'/providers/:provider/mcp/servers/:name/run',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const provider = parseProvider(req.params.provider);
|
||||
const body = (req.body as Record<string, unknown> | undefined) ?? {};
|
||||
const scope = parseMcpScope(body.scope ?? req.query.scope);
|
||||
const workspacePath = readOptionalQueryString(body.workspacePath ?? req.query.workspacePath);
|
||||
const result = await llmMcpService.runProviderServer({
|
||||
provider,
|
||||
name: readPathParam(req.params.name, 'name'),
|
||||
scope,
|
||||
workspacePath,
|
||||
});
|
||||
res.json(createApiSuccessResponse(result));
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* Adds one HTTP/stdio MCP server to every provider.
|
||||
*/
|
||||
router.post(
|
||||
'/mcp/servers/global',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const payload = parseMcpUpsertPayload(req.body);
|
||||
if (payload.scope === 'local') {
|
||||
throw new AppError('Global MCP add supports only "user" or "project" scopes.', {
|
||||
code: 'INVALID_GLOBAL_MCP_SCOPE',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
const results = await llmMcpService.addServerToAllProviders({
|
||||
...payload,
|
||||
scope: payload.scope === 'user' ? 'user' : 'project',
|
||||
});
|
||||
res.status(201).json(createApiSuccessResponse({ results }));
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* Lists provider-specific skills from all documented skill directories.
|
||||
*/
|
||||
router.get(
|
||||
'/providers/:provider/skills',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const provider = parseProvider(req.params.provider);
|
||||
const workspacePath = readOptionalQueryString(req.query.workspacePath);
|
||||
const skills = await llmSkillsService.listProviderSkills(provider, { workspacePath });
|
||||
res.json(createApiSuccessResponse({ provider, skills }));
|
||||
}),
|
||||
);
|
||||
|
||||
/**
|
||||
* Lists skills for one provider or for all providers in a single response.
|
||||
*/
|
||||
router.get(
|
||||
'/skills',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
const providerQuery = readOptionalQueryString(req.query.provider);
|
||||
const workspacePath = readOptionalQueryString(req.query.workspacePath);
|
||||
if (providerQuery) {
|
||||
const provider = parseProvider(providerQuery);
|
||||
const skills = await llmSkillsService.listProviderSkills(provider, { workspacePath });
|
||||
res.json(createApiSuccessResponse({ provider, skills }));
|
||||
return;
|
||||
}
|
||||
|
||||
const providers: LLMProvider[] = ['claude', 'codex', 'cursor', 'gemini'];
|
||||
const byProvider = Object.fromEntries(
|
||||
await Promise.all(
|
||||
providers.map(async (provider) => ([
|
||||
provider,
|
||||
await llmSkillsService.listProviderSkills(provider, { workspacePath }),
|
||||
])),
|
||||
),
|
||||
);
|
||||
res.json(createApiSuccessResponse({ providers: byProvider }));
|
||||
}),
|
||||
);
|
||||
|
||||
router.get(
|
||||
'/sessions/:sessionId/history',
|
||||
asyncHandler(async (req: Request, res: Response) => {
|
||||
|
||||
@@ -20,6 +20,35 @@ const normalizeOptionalString = (value: unknown): string | undefined => {
|
||||
return normalized.length > 0 ? normalized : undefined;
|
||||
};
|
||||
|
||||
/**
|
||||
* Validates and normalizes optional image path arrays.
|
||||
*/
|
||||
const normalizeImagePaths = (value: unknown): string[] | undefined => {
|
||||
if (value === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
if (!Array.isArray(value)) {
|
||||
throw new AppError('imagePaths must be an array of strings.', {
|
||||
code: 'INVALID_IMAGE_PATHS',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const normalizedPaths = value
|
||||
.map((entry) => (typeof entry === 'string' ? entry.trim() : ''))
|
||||
.filter((entry) => entry.length > 0);
|
||||
|
||||
if (normalizedPaths.length !== value.length) {
|
||||
throw new AppError('imagePaths must contain non-empty strings only.', {
|
||||
code: 'INVALID_IMAGE_PATHS',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
return normalizedPaths;
|
||||
};
|
||||
|
||||
/**
|
||||
* Validates and normalizes runtime permission mode.
|
||||
*/
|
||||
@@ -145,6 +174,7 @@ function parseStartPayload(payload: unknown): StartSessionInput {
|
||||
sessionId: normalizeOptionalString(body.sessionId),
|
||||
model: normalizeOptionalString(body.model),
|
||||
thinkingMode: normalizeOptionalString(body.thinkingMode),
|
||||
imagePaths: normalizeImagePaths(body.imagePaths),
|
||||
runtimePermissionMode: normalizePermissionMode(body.runtimePermissionMode),
|
||||
allowYolo: body.allowYolo === true,
|
||||
};
|
||||
|
||||
817
server/src/modules/llm/mcp.service.ts
Normal file
817
server/src/modules/llm/mcp.service.ts
Normal file
@@ -0,0 +1,817 @@
|
||||
import { mkdir, readFile, writeFile } from 'node:fs/promises';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
import { once } from 'node:events';
|
||||
|
||||
import spawn from 'cross-spawn';
|
||||
import TOML from '@iarna/toml';
|
||||
|
||||
import type { LLMProvider } from '@/shared/types/app.js';
|
||||
import { AppError } from '@/shared/utils/app-error.js';
|
||||
|
||||
// Where an MCP server definition lives: per-user config, repo-local config, or
// a project-shared config file.
export type McpScope = 'user' | 'local' | 'project';
// How a client talks to the MCP server process/endpoint.
export type McpTransport = 'stdio' | 'http' | 'sse';

/**
 * Provider-agnostic view of one configured MCP server.
 * stdio servers use command/args/env/cwd; http/sse servers use url/headers.
 */
export type UnifiedMcpServer = {
  provider: LLMProvider;
  name: string;
  scope: McpScope;
  transport: McpTransport;
  command?: string;
  args?: string[];
  env?: Record<string, string>;
  cwd?: string;
  url?: string;
  headers?: Record<string, string>;
  // Names of environment variables to forward (used by the codex config format).
  envVars?: string[];
  bearerTokenEnvVar?: string;
  envHttpHeaders?: Record<string, string>;
};

/**
 * Input accepted by upsert operations; `scope` defaults to "project" and
 * `workspacePath` defaults to the current working directory when omitted.
 */
export type UpsertMcpServerInput = {
  name: string;
  scope?: McpScope;
  transport: McpTransport;
  workspacePath?: string;
  command?: string;
  args?: string[];
  env?: Record<string, string>;
  cwd?: string;
  url?: string;
  headers?: Record<string, string>;
  envVars?: string[];
  bearerTokenEnvVar?: string;
  envHttpHeaders?: Record<string, string>;
};

// Scope/transport combinations each provider's native config format supports.
// Only claude supports the repo-"local" scope; codex has no SSE transport.
const PROVIDER_CAPABILITIES: Record<LLMProvider, { scopes: McpScope[]; transports: McpTransport[] }> = {
  claude: { scopes: ['user', 'local', 'project'], transports: ['stdio', 'http', 'sse'] },
  codex: { scopes: ['user', 'project'], transports: ['stdio', 'http'] },
  cursor: { scopes: ['user', 'project'], transports: ['stdio', 'http', 'sse'] },
  gemini: { scopes: ['user', 'project'], transports: ['stdio', 'http', 'sse'] },
};

// Fan-out order for operations that touch every provider.
const PROVIDERS: LLMProvider[] = ['claude', 'codex', 'cursor', 'gemini'];
|
||||
|
||||
/**
|
||||
* Unified MCP configuration service backed by provider-native config files.
|
||||
*/
|
||||
export const llmMcpService = {
|
||||
/**
|
||||
* Lists MCP servers for one provider grouped by user/local/project scopes.
|
||||
*/
|
||||
async listProviderServers(
|
||||
provider: LLMProvider,
|
||||
options?: { workspacePath?: string },
|
||||
): Promise<Record<McpScope, UnifiedMcpServer[]>> {
|
||||
const workspacePath = resolveWorkspacePath(options?.workspacePath);
|
||||
const grouped: Record<McpScope, UnifiedMcpServer[]> = {
|
||||
user: [],
|
||||
local: [],
|
||||
project: [],
|
||||
};
|
||||
|
||||
const capability = PROVIDER_CAPABILITIES[provider];
|
||||
for (const scope of capability.scopes) {
|
||||
const servers = await this.listProviderServersForScope(provider, scope, workspacePath);
|
||||
grouped[scope] = servers;
|
||||
}
|
||||
|
||||
return grouped;
|
||||
},
|
||||
|
||||
/**
|
||||
* Writes one MCP server definition into the provider's config file for the selected scope.
|
||||
*/
|
||||
async upsertProviderServer(provider: LLMProvider, input: UpsertMcpServerInput): Promise<UnifiedMcpServer> {
|
||||
validateProviderScopeAndTransport(provider, input.scope ?? 'project', input.transport);
|
||||
const scope = input.scope ?? 'project';
|
||||
const workspacePath = resolveWorkspacePath(input.workspacePath);
|
||||
const normalizedName = normalizeServerName(input.name);
|
||||
const scopedServers = await readScopedProviderServers(provider, scope, workspacePath);
|
||||
scopedServers[normalizedName] = buildProviderServerConfig(provider, input);
|
||||
await writeScopedProviderServers(provider, scope, workspacePath, scopedServers);
|
||||
|
||||
return {
|
||||
provider,
|
||||
name: normalizedName,
|
||||
scope,
|
||||
transport: input.transport,
|
||||
command: input.command,
|
||||
args: input.args,
|
||||
env: input.env,
|
||||
cwd: input.cwd,
|
||||
url: input.url,
|
||||
headers: input.headers,
|
||||
envVars: input.envVars,
|
||||
bearerTokenEnvVar: input.bearerTokenEnvVar,
|
||||
envHttpHeaders: input.envHttpHeaders,
|
||||
};
|
||||
},
|
||||
|
||||
/**
|
||||
* Removes one MCP server definition from the provider's config file.
|
||||
*/
|
||||
async removeProviderServer(
|
||||
provider: LLMProvider,
|
||||
input: { name: string; scope?: McpScope; workspacePath?: string },
|
||||
): Promise<{ removed: boolean; provider: LLMProvider; name: string; scope: McpScope }> {
|
||||
const scope = input.scope ?? 'project';
|
||||
validateProviderScopeAndTransport(provider, scope, 'stdio');
|
||||
const workspacePath = resolveWorkspacePath(input.workspacePath);
|
||||
const normalizedName = normalizeServerName(input.name);
|
||||
const scopedServers = await readScopedProviderServers(provider, scope, workspacePath);
|
||||
const removed = Object.prototype.hasOwnProperty.call(scopedServers, normalizedName);
|
||||
if (removed) {
|
||||
delete scopedServers[normalizedName];
|
||||
await writeScopedProviderServers(provider, scope, workspacePath, scopedServers);
|
||||
}
|
||||
|
||||
return { removed, provider, name: normalizedName, scope };
|
||||
},
|
||||
|
||||
/**
|
||||
* Adds one MCP server to all providers using the same input shape.
|
||||
*/
|
||||
async addServerToAllProviders(
|
||||
input: Omit<UpsertMcpServerInput, 'scope'> & { scope?: Exclude<McpScope, 'local'> },
|
||||
): Promise<Array<{ provider: LLMProvider; created: boolean; error?: string }>> {
|
||||
if (input.transport !== 'stdio' && input.transport !== 'http') {
|
||||
throw new AppError('Global MCP add supports only "stdio" and "http".', {
|
||||
code: 'INVALID_GLOBAL_MCP_TRANSPORT',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const scope = input.scope ?? 'project';
|
||||
const results: Array<{ provider: LLMProvider; created: boolean; error?: string }> = [];
|
||||
for (const provider of PROVIDERS) {
|
||||
try {
|
||||
await this.upsertProviderServer(provider, { ...input, scope });
|
||||
results.push({ provider, created: true });
|
||||
} catch (error) {
|
||||
results.push({
|
||||
provider,
|
||||
created: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return results;
|
||||
},
|
||||
|
||||
/**
|
||||
* Performs a lightweight startup/connectivity check for one configured MCP server.
|
||||
*/
|
||||
async runProviderServer(input: {
|
||||
provider: LLMProvider;
|
||||
name: string;
|
||||
scope?: McpScope;
|
||||
workspacePath?: string;
|
||||
}): Promise<{
|
||||
provider: LLMProvider;
|
||||
name: string;
|
||||
scope: McpScope;
|
||||
transport: McpTransport;
|
||||
reachable: boolean;
|
||||
statusCode?: number;
|
||||
error?: string;
|
||||
}> {
|
||||
const scope = input.scope ?? 'project';
|
||||
const workspacePath = resolveWorkspacePath(input.workspacePath);
|
||||
const normalizedName = normalizeServerName(input.name);
|
||||
const scopedServers = await readScopedProviderServers(input.provider, scope, workspacePath);
|
||||
const rawConfig = scopedServers[normalizedName];
|
||||
if (!rawConfig || typeof rawConfig !== 'object') {
|
||||
throw new AppError(`MCP server "${normalizedName}" was not found.`, {
|
||||
code: 'MCP_SERVER_NOT_FOUND',
|
||||
statusCode: 404,
|
||||
});
|
||||
}
|
||||
|
||||
const normalized = normalizeServerConfig(input.provider, scope, normalizedName, rawConfig);
|
||||
if (!normalized) {
|
||||
throw new AppError(`MCP server "${normalizedName}" has an invalid configuration.`, {
|
||||
code: 'MCP_SERVER_INVALID_CONFIG',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
if (normalized.transport === 'stdio') {
|
||||
const result = await runStdioServerProbe(normalized, workspacePath);
|
||||
return {
|
||||
provider: input.provider,
|
||||
name: normalizedName,
|
||||
scope,
|
||||
transport: normalized.transport,
|
||||
reachable: result.reachable,
|
||||
error: result.error,
|
||||
};
|
||||
}
|
||||
|
||||
const result = await runHttpServerProbe(normalized.url ?? '');
|
||||
return {
|
||||
provider: input.provider,
|
||||
name: normalizedName,
|
||||
scope,
|
||||
transport: normalized.transport,
|
||||
reachable: result.reachable,
|
||||
statusCode: result.statusCode,
|
||||
error: result.error,
|
||||
};
|
||||
},
|
||||
|
||||
/**
|
||||
* Reads and normalizes one provider scope into unified MCP server records.
|
||||
*/
|
||||
async listProviderServersForScope(
|
||||
provider: LLMProvider,
|
||||
scope: McpScope,
|
||||
workspacePath: string,
|
||||
): Promise<UnifiedMcpServer[]> {
|
||||
if (!PROVIDER_CAPABILITIES[provider].scopes.includes(scope)) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const scopedServers = await readScopedProviderServers(provider, scope, workspacePath);
|
||||
return Object.entries(scopedServers)
|
||||
.map(([name, rawConfig]) => normalizeServerConfig(provider, scope, name, rawConfig))
|
||||
.filter((entry): entry is UnifiedMcpServer => entry !== null);
|
||||
},
|
||||
};
|
||||
|
||||
/**
|
||||
* Resolves workspace paths once so all scope loaders read from a consistent absolute root.
|
||||
*/
|
||||
function resolveWorkspacePath(workspacePath?: string): string {
|
||||
return path.resolve(workspacePath ?? process.cwd());
|
||||
}
|
||||
|
||||
/**
|
||||
* Restricts MCP server names to non-empty trimmed strings.
|
||||
*/
|
||||
function normalizeServerName(name: string): string {
|
||||
const normalized = name.trim();
|
||||
if (!normalized) {
|
||||
throw new AppError('MCP server name is required.', {
|
||||
code: 'MCP_SERVER_NAME_REQUIRED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
return normalized;
|
||||
}
|
||||
|
||||
/**
|
||||
* Applies provider capability checks before read/write operations.
|
||||
*/
|
||||
function validateProviderScopeAndTransport(
|
||||
provider: LLMProvider,
|
||||
scope: McpScope,
|
||||
transport: McpTransport,
|
||||
): void {
|
||||
const capability = PROVIDER_CAPABILITIES[provider];
|
||||
if (!capability.scopes.includes(scope)) {
|
||||
throw new AppError(`Provider "${provider}" does not support "${scope}" MCP scope.`, {
|
||||
code: 'MCP_SCOPE_NOT_SUPPORTED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
if (!capability.transports.includes(transport)) {
|
||||
throw new AppError(`Provider "${provider}" does not support "${transport}" MCP transport.`, {
|
||||
code: 'MCP_TRANSPORT_NOT_SUPPORTED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Loads one scope's raw server map from a provider-native config file.
|
||||
*/
|
||||
async function readScopedProviderServers(
|
||||
provider: LLMProvider,
|
||||
scope: McpScope,
|
||||
workspacePath: string,
|
||||
): Promise<Record<string, unknown>> {
|
||||
switch (provider) {
|
||||
case 'claude':
|
||||
return readClaudeScopedServers(scope, workspacePath);
|
||||
case 'codex':
|
||||
return readCodexScopedServers(scope, workspacePath);
|
||||
case 'cursor':
|
||||
return readCursorScopedServers(scope, workspacePath);
|
||||
case 'gemini':
|
||||
return readGeminiScopedServers(scope, workspacePath);
|
||||
default:
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Persists one scope's raw server map back to provider-native config files.
|
||||
*/
|
||||
async function writeScopedProviderServers(
|
||||
provider: LLMProvider,
|
||||
scope: McpScope,
|
||||
workspacePath: string,
|
||||
servers: Record<string, unknown>,
|
||||
): Promise<void> {
|
||||
switch (provider) {
|
||||
case 'claude':
|
||||
await writeClaudeScopedServers(scope, workspacePath, servers);
|
||||
return;
|
||||
case 'codex':
|
||||
await writeCodexScopedServers(scope, workspacePath, servers);
|
||||
return;
|
||||
case 'cursor':
|
||||
await writeCursorScopedServers(scope, workspacePath, servers);
|
||||
return;
|
||||
case 'gemini':
|
||||
await writeGeminiScopedServers(scope, workspacePath, servers);
|
||||
return;
|
||||
default:
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates one provider-native server config object from unified input payload.
|
||||
*/
|
||||
function buildProviderServerConfig(provider: LLMProvider, input: UpsertMcpServerInput): Record<string, unknown> {
|
||||
const scope = input.scope ?? 'project';
|
||||
validateProviderScopeAndTransport(provider, scope, input.transport);
|
||||
|
||||
if (input.transport === 'stdio') {
|
||||
if (!input.command?.trim()) {
|
||||
throw new AppError('command is required for stdio MCP servers.', {
|
||||
code: 'MCP_COMMAND_REQUIRED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
if (provider === 'claude') {
|
||||
return {
|
||||
type: 'stdio',
|
||||
command: input.command,
|
||||
args: input.args ?? [],
|
||||
env: input.env ?? {},
|
||||
};
|
||||
}
|
||||
|
||||
if (provider === 'codex') {
|
||||
return {
|
||||
command: input.command,
|
||||
args: input.args ?? [],
|
||||
env: input.env ?? {},
|
||||
env_vars: input.envVars ?? [],
|
||||
cwd: input.cwd,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
command: input.command,
|
||||
args: input.args ?? [],
|
||||
env: input.env ?? {},
|
||||
cwd: input.cwd,
|
||||
};
|
||||
}
|
||||
|
||||
if (!input.url?.trim()) {
|
||||
throw new AppError('url is required for http/sse MCP servers.', {
|
||||
code: 'MCP_URL_REQUIRED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
if (provider === 'codex') {
|
||||
return {
|
||||
url: input.url,
|
||||
bearer_token_env_var: input.bearerTokenEnvVar,
|
||||
http_headers: input.headers ?? {},
|
||||
env_http_headers: input.envHttpHeaders ?? {},
|
||||
};
|
||||
}
|
||||
|
||||
if (provider === 'cursor') {
|
||||
return {
|
||||
url: input.url,
|
||||
headers: input.headers ?? {},
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
type: input.transport,
|
||||
url: input.url,
|
||||
headers: input.headers ?? {},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Maps one provider-native server object into the unified response shape.
|
||||
*/
|
||||
function normalizeServerConfig(
|
||||
provider: LLMProvider,
|
||||
scope: McpScope,
|
||||
name: string,
|
||||
rawConfig: unknown,
|
||||
): UnifiedMcpServer | null {
|
||||
if (!rawConfig || typeof rawConfig !== 'object') {
|
||||
return null;
|
||||
}
|
||||
|
||||
const config = rawConfig as Record<string, unknown>;
|
||||
if (typeof config.command === 'string') {
|
||||
const transport: McpTransport = 'stdio';
|
||||
return {
|
||||
provider,
|
||||
name,
|
||||
scope,
|
||||
transport,
|
||||
command: config.command,
|
||||
args: readStringArray(config.args),
|
||||
env: readStringRecord(config.env),
|
||||
cwd: readOptionalString(config.cwd),
|
||||
envVars: readStringArray(config.env_vars),
|
||||
};
|
||||
}
|
||||
|
||||
if (typeof config.url === 'string') {
|
||||
let transport: McpTransport = 'http';
|
||||
if (provider === 'claude' || provider === 'gemini') {
|
||||
const typeValue = readOptionalString(config.type);
|
||||
if (typeValue === 'sse') {
|
||||
transport = 'sse';
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
provider,
|
||||
name,
|
||||
scope,
|
||||
transport,
|
||||
url: config.url,
|
||||
headers: readStringRecord(config.headers) ?? readStringRecord(config.http_headers),
|
||||
bearerTokenEnvVar: readOptionalString(config.bearer_token_env_var),
|
||||
envHttpHeaders: readStringRecord(config.env_http_headers),
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads Claude MCP servers from ~/.claude.json and project .mcp.json files.
|
||||
*/
|
||||
async function readClaudeScopedServers(scope: McpScope, workspacePath: string): Promise<Record<string, unknown>> {
|
||||
if (scope === 'project') {
|
||||
const filePath = path.join(workspacePath, '.mcp.json');
|
||||
const config = await readJsonConfig(filePath);
|
||||
return readObjectRecord(config.mcpServers) ?? {};
|
||||
}
|
||||
|
||||
const filePath = path.join(os.homedir(), '.claude.json');
|
||||
const config = await readJsonConfig(filePath);
|
||||
if (scope === 'user') {
|
||||
return readObjectRecord(config.mcpServers) ?? {};
|
||||
}
|
||||
|
||||
if (scope === 'local') {
|
||||
const projects = readObjectRecord(config.projects) ?? {};
|
||||
const projectConfig = readObjectRecord(projects[workspacePath]) ?? {};
|
||||
return readObjectRecord(projectConfig.mcpServers) ?? {};
|
||||
}
|
||||
|
||||
return {};
|
||||
}
|
||||
|
||||
/**
|
||||
* Persists Claude MCP servers back to ~/.claude.json or .mcp.json depending on scope.
|
||||
*/
|
||||
async function writeClaudeScopedServers(
|
||||
scope: McpScope,
|
||||
workspacePath: string,
|
||||
servers: Record<string, unknown>,
|
||||
): Promise<void> {
|
||||
if (scope === 'project') {
|
||||
const filePath = path.join(workspacePath, '.mcp.json');
|
||||
const config = await readJsonConfig(filePath);
|
||||
config.mcpServers = servers;
|
||||
await writeJsonConfig(filePath, config);
|
||||
return;
|
||||
}
|
||||
|
||||
const filePath = path.join(os.homedir(), '.claude.json');
|
||||
const config = await readJsonConfig(filePath);
|
||||
if (scope === 'user') {
|
||||
config.mcpServers = servers;
|
||||
await writeJsonConfig(filePath, config);
|
||||
return;
|
||||
}
|
||||
|
||||
const projects = readObjectRecord(config.projects) ?? {};
|
||||
const projectConfig = readObjectRecord(projects[workspacePath]) ?? {};
|
||||
projectConfig.mcpServers = servers;
|
||||
projects[workspacePath] = projectConfig;
|
||||
config.projects = projects;
|
||||
await writeJsonConfig(filePath, config);
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads Codex MCP servers from config.toml user or project scopes.
|
||||
*/
|
||||
async function readCodexScopedServers(scope: McpScope, workspacePath: string): Promise<Record<string, unknown>> {
|
||||
if (scope === 'local') {
|
||||
throw new AppError('Codex does not support local MCP scope.', {
|
||||
code: 'MCP_SCOPE_NOT_SUPPORTED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const filePath = scope === 'user'
|
||||
? path.join(os.homedir(), '.codex', 'config.toml')
|
||||
: path.join(workspacePath, '.codex', 'config.toml');
|
||||
const config = await readTomlConfig(filePath);
|
||||
return readObjectRecord(config.mcp_servers) ?? {};
|
||||
}
|
||||
|
||||
/**
|
||||
* Persists Codex MCP servers to config.toml user/project scopes.
|
||||
*/
|
||||
async function writeCodexScopedServers(
|
||||
scope: McpScope,
|
||||
workspacePath: string,
|
||||
servers: Record<string, unknown>,
|
||||
): Promise<void> {
|
||||
if (scope === 'local') {
|
||||
throw new AppError('Codex does not support local MCP scope.', {
|
||||
code: 'MCP_SCOPE_NOT_SUPPORTED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const filePath = scope === 'user'
|
||||
? path.join(os.homedir(), '.codex', 'config.toml')
|
||||
: path.join(workspacePath, '.codex', 'config.toml');
|
||||
const config = await readTomlConfig(filePath);
|
||||
config.mcp_servers = servers;
|
||||
await writeTomlConfig(filePath, config);
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads Gemini MCP servers from settings.json user/project scopes.
|
||||
*/
|
||||
async function readGeminiScopedServers(scope: McpScope, workspacePath: string): Promise<Record<string, unknown>> {
|
||||
if (scope === 'local') {
|
||||
throw new AppError('Gemini does not support local MCP scope.', {
|
||||
code: 'MCP_SCOPE_NOT_SUPPORTED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const filePath = scope === 'user'
|
||||
? path.join(os.homedir(), '.gemini', 'settings.json')
|
||||
: path.join(workspacePath, '.gemini', 'settings.json');
|
||||
const config = await readJsonConfig(filePath);
|
||||
return readObjectRecord(config.mcpServers) ?? {};
|
||||
}
|
||||
|
||||
/**
|
||||
* Persists Gemini MCP servers to settings.json user/project scopes.
|
||||
*/
|
||||
async function writeGeminiScopedServers(
|
||||
scope: McpScope,
|
||||
workspacePath: string,
|
||||
servers: Record<string, unknown>,
|
||||
): Promise<void> {
|
||||
if (scope === 'local') {
|
||||
throw new AppError('Gemini does not support local MCP scope.', {
|
||||
code: 'MCP_SCOPE_NOT_SUPPORTED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const filePath = scope === 'user'
|
||||
? path.join(os.homedir(), '.gemini', 'settings.json')
|
||||
: path.join(workspacePath, '.gemini', 'settings.json');
|
||||
const config = await readJsonConfig(filePath);
|
||||
config.mcpServers = servers;
|
||||
await writeJsonConfig(filePath, config);
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads Cursor MCP servers from mcp.json user/project scopes.
|
||||
*/
|
||||
async function readCursorScopedServers(scope: McpScope, workspacePath: string): Promise<Record<string, unknown>> {
|
||||
if (scope === 'local') {
|
||||
throw new AppError('Cursor does not support local MCP scope.', {
|
||||
code: 'MCP_SCOPE_NOT_SUPPORTED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const filePath = scope === 'user'
|
||||
? path.join(os.homedir(), '.cursor', 'mcp.json')
|
||||
: path.join(workspacePath, '.cursor', 'mcp.json');
|
||||
const config = await readJsonConfig(filePath);
|
||||
return readObjectRecord(config.mcpServers) ?? {};
|
||||
}
|
||||
|
||||
/**
|
||||
* Persists Cursor MCP servers to mcp.json user/project scopes.
|
||||
*/
|
||||
async function writeCursorScopedServers(
|
||||
scope: McpScope,
|
||||
workspacePath: string,
|
||||
servers: Record<string, unknown>,
|
||||
): Promise<void> {
|
||||
if (scope === 'local') {
|
||||
throw new AppError('Cursor does not support local MCP scope.', {
|
||||
code: 'MCP_SCOPE_NOT_SUPPORTED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const filePath = scope === 'user'
|
||||
? path.join(os.homedir(), '.cursor', 'mcp.json')
|
||||
: path.join(workspacePath, '.cursor', 'mcp.json');
|
||||
const config = await readJsonConfig(filePath);
|
||||
config.mcpServers = servers;
|
||||
await writeJsonConfig(filePath, config);
|
||||
}
|
||||
|
||||
/**
|
||||
* Runs a short stdio process startup probe.
|
||||
*/
|
||||
async function runStdioServerProbe(
|
||||
server: UnifiedMcpServer,
|
||||
workspacePath: string,
|
||||
): Promise<{ reachable: boolean; error?: string }> {
|
||||
if (!server.command) {
|
||||
return { reachable: false, error: 'Missing stdio command.' };
|
||||
}
|
||||
|
||||
try {
|
||||
const child = spawn(server.command, server.args ?? [], {
|
||||
cwd: server.cwd ? path.resolve(workspacePath, server.cwd) : workspacePath,
|
||||
env: {
|
||||
...process.env,
|
||||
...(server.env ?? {}),
|
||||
},
|
||||
stdio: ['ignore', 'pipe', 'pipe'],
|
||||
});
|
||||
|
||||
const timeout = setTimeout(() => {
|
||||
if (!child.killed && child.exitCode === null) {
|
||||
child.kill('SIGTERM');
|
||||
}
|
||||
}, 1_500);
|
||||
|
||||
const errorPromise = once(child, 'error').then(([error]) => {
|
||||
throw error;
|
||||
});
|
||||
const closePromise = once(child, 'close');
|
||||
await Promise.race([closePromise, errorPromise]);
|
||||
clearTimeout(timeout);
|
||||
|
||||
if (typeof child.exitCode === 'number' && child.exitCode !== 0) {
|
||||
return {
|
||||
reachable: false,
|
||||
error: `Process exited with code ${child.exitCode}.`,
|
||||
};
|
||||
}
|
||||
|
||||
return { reachable: true };
|
||||
} catch (error) {
|
||||
return {
|
||||
reachable: false,
|
||||
error: error instanceof Error ? error.message : 'Failed to start stdio process',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Runs a lightweight HTTP/SSE reachability probe.
|
||||
*/
|
||||
async function runHttpServerProbe(url: string): Promise<{ reachable: boolean; statusCode?: number; error?: string }> {
|
||||
const controller = new AbortController();
|
||||
const timeout = setTimeout(() => controller.abort(), 3_000);
|
||||
try {
|
||||
const response = await fetch(url, { method: 'GET', signal: controller.signal });
|
||||
clearTimeout(timeout);
|
||||
return {
|
||||
reachable: true,
|
||||
statusCode: response.status,
|
||||
};
|
||||
} catch (error) {
|
||||
clearTimeout(timeout);
|
||||
return {
|
||||
reachable: false,
|
||||
error: error instanceof Error ? error.message : 'Network probe failed',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Safely reads a JSON config file and returns an empty object when missing.
|
||||
*/
|
||||
async function readJsonConfig(filePath: string): Promise<Record<string, unknown>> {
|
||||
try {
|
||||
const content = await readFile(filePath, 'utf8');
|
||||
const parsed = JSON.parse(content) as Record<string, unknown>;
|
||||
return readObjectRecord(parsed) ?? {};
|
||||
} catch (error) {
|
||||
const code = (error as NodeJS.ErrnoException).code;
|
||||
if (code === 'ENOENT') {
|
||||
return {};
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Writes one JSON config with stable formatting.
|
||||
*/
|
||||
async function writeJsonConfig(filePath: string, data: Record<string, unknown>): Promise<void> {
|
||||
await mkdir(path.dirname(filePath), { recursive: true });
|
||||
await writeFile(filePath, `${JSON.stringify(data, null, 2)}\n`, 'utf8');
|
||||
}
|
||||
|
||||
/**
|
||||
* Safely reads a TOML config and returns an empty object when missing.
|
||||
*/
|
||||
async function readTomlConfig(filePath: string): Promise<Record<string, unknown>> {
|
||||
try {
|
||||
const content = await readFile(filePath, 'utf8');
|
||||
const parsed = TOML.parse(content) as Record<string, unknown>;
|
||||
return readObjectRecord(parsed) ?? {};
|
||||
} catch (error) {
|
||||
const code = (error as NodeJS.ErrnoException).code;
|
||||
if (code === 'ENOENT') {
|
||||
return {};
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Writes one TOML config file.
|
||||
*/
|
||||
async function writeTomlConfig(filePath: string, data: Record<string, unknown>): Promise<void> {
|
||||
await mkdir(path.dirname(filePath), { recursive: true });
|
||||
const toml = TOML.stringify(data as any);
|
||||
await writeFile(filePath, toml, 'utf8');
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads plain object records.
|
||||
*/
|
||||
function readObjectRecord(value: unknown): Record<string, unknown> | null {
|
||||
if (!value || typeof value !== 'object' || Array.isArray(value)) {
|
||||
return null;
|
||||
}
|
||||
return value as Record<string, unknown>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads optional strings.
|
||||
*/
|
||||
function readOptionalString(value: unknown): string | undefined {
|
||||
if (typeof value !== 'string') {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const normalized = value.trim();
|
||||
return normalized.length ? normalized : undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads optional string arrays.
|
||||
*/
|
||||
function readStringArray(value: unknown): string[] | undefined {
|
||||
if (!Array.isArray(value)) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return value.filter((entry): entry is string => typeof entry === 'string');
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads optional string maps.
|
||||
*/
|
||||
function readStringRecord(value: unknown): Record<string, string> | undefined {
|
||||
const record = readObjectRecord(value);
|
||||
if (!record) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const normalized: Record<string, string> = {};
|
||||
for (const [key, entry] of Object.entries(record)) {
|
||||
if (typeof entry === 'string') {
|
||||
normalized[key] = entry;
|
||||
}
|
||||
}
|
||||
|
||||
return Object.keys(normalized).length > 0 ? normalized : undefined;
|
||||
}
|
||||
@@ -63,6 +63,17 @@ export abstract class BaseCliProvider extends AbstractProvider {
|
||||
*/
|
||||
protected abstract createCliInvocation(input: CreateCliInvocationInput): CliInvocation;
|
||||
|
||||
/**
|
||||
* Appends uploaded image paths to prompt text for CLI providers that only accept string prompts.
|
||||
*/
|
||||
protected appendImagePathsToPrompt(prompt: string, imagePaths?: string[]): string {
|
||||
if (!imagePaths || imagePaths.length === 0) {
|
||||
return prompt;
|
||||
}
|
||||
|
||||
return `${prompt}\n\n${JSON.stringify(imagePaths)}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Maps one stdout/stderr line into either JSON or plain-text event shapes.
|
||||
*/
|
||||
|
||||
@@ -4,6 +4,8 @@ import {
|
||||
type ModelInfo,
|
||||
type Options,
|
||||
} from '@anthropic-ai/claude-agent-sdk';
|
||||
import path from 'node:path';
|
||||
import { readFile } from 'node:fs/promises';
|
||||
|
||||
import { BaseSdkProvider } from '@/modules/llm/providers/base-sdk.provider.js';
|
||||
import type {
|
||||
@@ -19,6 +21,36 @@ type ClaudeExecutionInput = StartSessionInput & {
|
||||
};
|
||||
|
||||
const CLAUDE_THINKING_LEVELS = new Set(['low', 'medium', 'high', 'max']);
|
||||
const SUPPORTED_CLAUDE_IMAGE_TYPES = new Map<string, 'image/jpeg' | 'image/png' | 'image/gif' | 'image/webp'>([
|
||||
['.jpg', 'image/jpeg'],
|
||||
['.jpeg', 'image/jpeg'],
|
||||
['.png', 'image/png'],
|
||||
['.gif', 'image/gif'],
|
||||
['.webp', 'image/webp'],
|
||||
]);
|
||||
|
||||
type ClaudeUserPromptMessage = {
|
||||
type: 'user';
|
||||
message: {
|
||||
role: 'user';
|
||||
content: Array<
|
||||
| {
|
||||
type: 'text';
|
||||
text: string;
|
||||
}
|
||||
| {
|
||||
type: 'image';
|
||||
source: {
|
||||
type: 'base64';
|
||||
media_type: 'image/jpeg' | 'image/png' | 'image/gif' | 'image/webp';
|
||||
data: string;
|
||||
};
|
||||
}
|
||||
>;
|
||||
};
|
||||
parent_tool_use_id: null;
|
||||
timestamp: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* Claude SDK provider implementation.
|
||||
@@ -74,8 +106,9 @@ export class ClaudeProvider extends BaseSdkProvider {
|
||||
options.sessionId = input.sessionId;
|
||||
}
|
||||
|
||||
const promptInput = await this.buildPromptInput(input.prompt, input.imagePaths, input.workspacePath);
|
||||
const queryInstance = query({
|
||||
prompt: input.prompt,
|
||||
prompt: promptInput as any,
|
||||
options,
|
||||
});
|
||||
|
||||
@@ -91,6 +124,58 @@ export class ClaudeProvider extends BaseSdkProvider {
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds a Claude prompt payload. When images are present, this returns an async iterable user message.
|
||||
*/
|
||||
private async buildPromptInput(
|
||||
prompt: string,
|
||||
imagePaths?: string[],
|
||||
workspacePath?: string,
|
||||
): Promise<string | AsyncIterable<ClaudeUserPromptMessage>> {
|
||||
if (!imagePaths || imagePaths.length === 0) {
|
||||
return prompt;
|
||||
}
|
||||
|
||||
const content: ClaudeUserPromptMessage['message']['content'] = [
|
||||
{ type: 'text', text: prompt },
|
||||
];
|
||||
|
||||
for (const imagePath of imagePaths) {
|
||||
const resolvedPath = path.isAbsolute(imagePath)
|
||||
? imagePath
|
||||
: path.resolve(workspacePath ?? process.cwd(), imagePath);
|
||||
const extension = path.extname(resolvedPath).toLowerCase();
|
||||
const mediaType = SUPPORTED_CLAUDE_IMAGE_TYPES.get(extension);
|
||||
if (!mediaType) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const imageBytes = await readFile(resolvedPath);
|
||||
content.push({
|
||||
type: 'image',
|
||||
source: {
|
||||
type: 'base64',
|
||||
media_type: mediaType,
|
||||
data: imageBytes.toString('base64'),
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
const sdkPrompt = (async function* (): AsyncIterable<ClaudeUserPromptMessage> {
|
||||
yield {
|
||||
type: 'user',
|
||||
message: {
|
||||
role: 'user',
|
||||
content,
|
||||
},
|
||||
parent_tool_use_id: null,
|
||||
timestamp: new Date().toISOString(),
|
||||
};
|
||||
})();
|
||||
|
||||
return sdkPrompt;
|
||||
}
|
||||
|
||||
/**
|
||||
* Produces compact event metadata for frontend stream rendering.
|
||||
*/
|
||||
|
||||
@@ -29,7 +29,18 @@ type CodexSdkClient = {
|
||||
|
||||
type CodexThread = {
|
||||
runStreamed: (
|
||||
prompt: string,
|
||||
prompt:
|
||||
| string
|
||||
| Array<
|
||||
| {
|
||||
type: 'text';
|
||||
text: string;
|
||||
}
|
||||
| {
|
||||
type: 'local_image';
|
||||
path: string;
|
||||
}
|
||||
>,
|
||||
options?: {
|
||||
signal?: AbortSignal;
|
||||
},
|
||||
@@ -114,7 +125,8 @@ export class CodexProvider extends BaseSdkProvider {
|
||||
: client.startThread(threadOptions);
|
||||
|
||||
const abortController = new AbortController();
|
||||
const streamedTurn = await thread.runStreamed(input.prompt, {
|
||||
const promptInput = this.buildPromptInput(input.prompt, input.imagePaths, input.workspacePath);
|
||||
const streamedTurn = await thread.runStreamed(promptInput, {
|
||||
signal: abortController.signal,
|
||||
});
|
||||
|
||||
@@ -127,6 +139,33 @@ export class CodexProvider extends BaseSdkProvider {
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds Codex prompt items. Images are sent as `local_image` entries for SDK-native image support.
|
||||
*/
|
||||
private buildPromptInput(
|
||||
prompt: string,
|
||||
imagePaths?: string[],
|
||||
workspacePath?: string,
|
||||
): string | Array<{ type: 'text'; text: string } | { type: 'local_image'; path: string }> {
|
||||
if (!imagePaths || imagePaths.length === 0) {
|
||||
return prompt;
|
||||
}
|
||||
|
||||
const resolvedImagePaths = imagePaths.map((imagePath) => (
|
||||
path.isAbsolute(imagePath)
|
||||
? imagePath
|
||||
: path.resolve(workspacePath ?? process.cwd(), imagePath)
|
||||
));
|
||||
|
||||
return [
|
||||
{ type: 'text', text: prompt },
|
||||
...resolvedImagePaths.map((resolvedPath) => ({
|
||||
type: 'local_image' as const,
|
||||
path: resolvedPath,
|
||||
})),
|
||||
];
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalizes Codex stream events into the shared event shape.
|
||||
*/
|
||||
|
||||
@@ -40,6 +40,7 @@ export class CursorProvider extends BaseCliProvider {
|
||||
args: string[];
|
||||
cwd?: string;
|
||||
} {
|
||||
const promptWithImagePaths = this.appendImagePathsToPrompt(input.prompt, input.imagePaths);
|
||||
const args = ['--print', '--trust', '--output-format', 'stream-json'];
|
||||
|
||||
if (input.allowYolo) {
|
||||
@@ -54,7 +55,7 @@ export class CursorProvider extends BaseCliProvider {
|
||||
args.push('--resume', input.sessionId);
|
||||
}
|
||||
|
||||
args.push(input.prompt);
|
||||
args.push(promptWithImagePaths);
|
||||
|
||||
return {
|
||||
command: 'cursor-agent',
|
||||
|
||||
@@ -47,7 +47,8 @@ export class GeminiProvider extends BaseCliProvider {
|
||||
args: string[];
|
||||
cwd?: string;
|
||||
} {
|
||||
const args = ['--prompt', input.prompt, '--output-format', 'stream-json'];
|
||||
const promptWithImagePaths = this.appendImagePathsToPrompt(input.prompt, input.imagePaths);
|
||||
const args = ['--prompt', promptWithImagePaths, '--output-format', 'stream-json'];
|
||||
|
||||
if (input.model) {
|
||||
args.push('--model', input.model);
|
||||
|
||||
@@ -49,6 +49,7 @@ export type StartSessionInput = {
|
||||
sessionId?: string;
|
||||
model?: string;
|
||||
thinkingMode?: string;
|
||||
imagePaths?: string[];
|
||||
runtimePermissionMode?: RuntimePermissionMode;
|
||||
allowYolo?: boolean;
|
||||
};
|
||||
|
||||
396
server/src/modules/llm/skills.service.ts
Normal file
396
server/src/modules/llm/skills.service.ts
Normal file
@@ -0,0 +1,396 @@
|
||||
import { access, readFile, readdir } from 'node:fs/promises';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
|
||||
import type { LLMProvider } from '@/shared/types/app.js';
|
||||
|
||||
export type SkillScope = 'user' | 'project' | 'plugin' | 'repo' | 'admin' | 'system';
|
||||
|
||||
export type UnifiedSkill = {
|
||||
provider: LLMProvider;
|
||||
scope: SkillScope;
|
||||
name: string;
|
||||
description?: string;
|
||||
invocation: string;
|
||||
filePath: string;
|
||||
pluginName?: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* Unified provider skills loader used by the refactor LLM module.
|
||||
*/
|
||||
export const llmSkillsService = {
|
||||
/**
|
||||
* Lists all available skills for one provider from provider-specific skill directories.
|
||||
*/
|
||||
async listProviderSkills(
|
||||
provider: LLMProvider,
|
||||
options?: { workspacePath?: string },
|
||||
): Promise<UnifiedSkill[]> {
|
||||
const workspacePath = path.resolve(options?.workspacePath ?? process.cwd());
|
||||
switch (provider) {
|
||||
case 'claude':
|
||||
return listClaudeSkills(workspacePath);
|
||||
case 'codex':
|
||||
return listCodexSkills(workspacePath);
|
||||
case 'cursor':
|
||||
return listCursorSkills(workspacePath);
|
||||
case 'gemini':
|
||||
return listGeminiSkills(workspacePath);
|
||||
default:
|
||||
return [];
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
/**
|
||||
* Reads Claude user/project skills and plugin skills with plugin namespace commands.
|
||||
*/
|
||||
async function listClaudeSkills(workspacePath: string): Promise<UnifiedSkill[]> {
|
||||
const home = os.homedir();
|
||||
const skills: UnifiedSkill[] = [];
|
||||
|
||||
skills.push(
|
||||
...(await listSkillsFromDirectory({
|
||||
provider: 'claude',
|
||||
scope: 'user',
|
||||
skillsDirectory: path.join(home, '.claude', 'skills'),
|
||||
invocationPrefix: '/',
|
||||
})),
|
||||
);
|
||||
|
||||
skills.push(
|
||||
...(await listSkillsFromDirectory({
|
||||
provider: 'claude',
|
||||
scope: 'project',
|
||||
skillsDirectory: path.join(workspacePath, '.claude', 'skills'),
|
||||
invocationPrefix: '/',
|
||||
})),
|
||||
);
|
||||
|
||||
const enabledPlugins = await readClaudeEnabledPlugins();
|
||||
if (!enabledPlugins.length) {
|
||||
return skills;
|
||||
}
|
||||
|
||||
const installedPluginIndex = await readClaudeInstalledPluginIndex();
|
||||
for (const pluginId of enabledPlugins) {
|
||||
const pluginInstalls = installedPluginIndex[pluginId];
|
||||
if (!Array.isArray(pluginInstalls)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const pluginNamespace = pluginId.split('@')[0] ?? pluginId;
|
||||
for (const install of pluginInstalls) {
|
||||
if (!install || typeof install !== 'object') {
|
||||
continue;
|
||||
}
|
||||
const installPath = typeof (install as Record<string, unknown>).installPath === 'string'
|
||||
? (install as Record<string, unknown>).installPath as string
|
||||
: '';
|
||||
if (!installPath) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const pluginSkills = await listSkillsFromDirectory({
|
||||
provider: 'claude',
|
||||
scope: 'plugin',
|
||||
skillsDirectory: path.join(installPath, 'skills'),
|
||||
invocationPrefix: '/',
|
||||
pluginName: pluginNamespace,
|
||||
});
|
||||
|
||||
for (const skill of pluginSkills) {
|
||||
skill.invocation = `/${pluginNamespace}:${skill.name}`;
|
||||
skill.pluginName = pluginNamespace;
|
||||
}
|
||||
|
||||
skills.push(...pluginSkills);
|
||||
}
|
||||
}
|
||||
|
||||
return deduplicateSkills(skills);
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads Codex skills from repo/user/admin/system locations.
|
||||
*/
|
||||
async function listCodexSkills(workspacePath: string): Promise<UnifiedSkill[]> {
|
||||
const home = os.homedir();
|
||||
const repoRoot = await findGitRepoRoot(workspacePath);
|
||||
const candidateDirectories: Array<{ scope: SkillScope; directory: string }> = [
|
||||
{ scope: 'repo', directory: path.join(workspacePath, '.agents', 'skills') },
|
||||
{ scope: 'repo', directory: path.join(workspacePath, '..', '.agents', 'skills') },
|
||||
{ scope: 'user', directory: path.join(home, '.agents', 'skills') },
|
||||
{ scope: 'admin', directory: path.join(path.sep, 'etc', 'codex', 'skills') },
|
||||
{ scope: 'system', directory: path.join(home, '.codex', 'skills', '.system') },
|
||||
];
|
||||
if (repoRoot) {
|
||||
candidateDirectories.push({ scope: 'repo', directory: path.join(repoRoot, '.agents', 'skills') });
|
||||
}
|
||||
|
||||
const skills: UnifiedSkill[] = [];
|
||||
for (const candidate of deduplicateDirectories(candidateDirectories)) {
|
||||
const loadedSkills = await listSkillsFromDirectory({
|
||||
provider: 'codex',
|
||||
scope: candidate.scope,
|
||||
skillsDirectory: candidate.directory,
|
||||
invocationPrefix: '$',
|
||||
});
|
||||
skills.push(...loadedSkills);
|
||||
}
|
||||
|
||||
return deduplicateSkills(skills);
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads Gemini user/project skill directories.
|
||||
*/
|
||||
async function listGeminiSkills(workspacePath: string): Promise<UnifiedSkill[]> {
|
||||
const home = os.homedir();
|
||||
const candidateDirectories: Array<{ scope: SkillScope; directory: string }> = [
|
||||
{ scope: 'user', directory: path.join(home, '.gemini', 'skills') },
|
||||
{ scope: 'user', directory: path.join(home, '.agents', 'skills') },
|
||||
{ scope: 'project', directory: path.join(workspacePath, '.gemini', 'skills') },
|
||||
{ scope: 'project', directory: path.join(workspacePath, '.agents', 'skills') },
|
||||
];
|
||||
|
||||
const skills: UnifiedSkill[] = [];
|
||||
for (const candidate of deduplicateDirectories(candidateDirectories)) {
|
||||
const loadedSkills = await listSkillsFromDirectory({
|
||||
provider: 'gemini',
|
||||
scope: candidate.scope,
|
||||
skillsDirectory: candidate.directory,
|
||||
invocationPrefix: '/',
|
||||
});
|
||||
skills.push(...loadedSkills);
|
||||
}
|
||||
|
||||
return deduplicateSkills(skills);
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads Cursor user/project skill directories.
|
||||
*/
|
||||
async function listCursorSkills(workspacePath: string): Promise<UnifiedSkill[]> {
|
||||
const home = os.homedir();
|
||||
const candidateDirectories: Array<{ scope: SkillScope; directory: string }> = [
|
||||
{ scope: 'project', directory: path.join(workspacePath, '.agents', 'skills') },
|
||||
{ scope: 'project', directory: path.join(workspacePath, '.cursor', 'skills') },
|
||||
{ scope: 'user', directory: path.join(home, '.cursor', 'skills') },
|
||||
];
|
||||
|
||||
const skills: UnifiedSkill[] = [];
|
||||
for (const candidate of deduplicateDirectories(candidateDirectories)) {
|
||||
const loadedSkills = await listSkillsFromDirectory({
|
||||
provider: 'cursor',
|
||||
scope: candidate.scope,
|
||||
skillsDirectory: candidate.directory,
|
||||
invocationPrefix: '/',
|
||||
});
|
||||
skills.push(...loadedSkills);
|
||||
}
|
||||
|
||||
return deduplicateSkills(skills);
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads SKILL.md files from a `<skills-dir>/<skill-name>/SKILL.md` directory layout.
|
||||
*/
|
||||
async function listSkillsFromDirectory(input: {
|
||||
provider: LLMProvider;
|
||||
scope: SkillScope;
|
||||
skillsDirectory: string;
|
||||
invocationPrefix: '/' | '$';
|
||||
pluginName?: string;
|
||||
}): Promise<UnifiedSkill[]> {
|
||||
if (!(await pathExists(input.skillsDirectory))) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const entries = await readdir(input.skillsDirectory, { withFileTypes: true });
|
||||
const skills: UnifiedSkill[] = [];
|
||||
for (const entry of entries) {
|
||||
if (!entry.isDirectory()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const skillDirectory = path.join(input.skillsDirectory, entry.name);
|
||||
const skillFilePath = path.join(skillDirectory, 'SKILL.md');
|
||||
if (!(await pathExists(skillFilePath))) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const skillMarkdown = await readFile(skillFilePath, 'utf8');
|
||||
const metadata = parseSkillFrontmatter(skillMarkdown);
|
||||
const skillName = metadata.name ?? entry.name;
|
||||
const invocation = `${input.invocationPrefix}${skillName}`;
|
||||
skills.push({
|
||||
provider: input.provider,
|
||||
scope: input.scope,
|
||||
name: skillName,
|
||||
description: metadata.description,
|
||||
invocation,
|
||||
filePath: skillFilePath,
|
||||
pluginName: input.pluginName,
|
||||
});
|
||||
}
|
||||
|
||||
return skills;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses frontmatter metadata from SKILL.md files.
|
||||
*/
|
||||
function parseSkillFrontmatter(content: string): { name?: string; description?: string } {
|
||||
if (!content.startsWith('---')) {
|
||||
return {};
|
||||
}
|
||||
|
||||
const closingDelimiterIndex = content.indexOf('\n---', 3);
|
||||
if (closingDelimiterIndex < 0) {
|
||||
return {};
|
||||
}
|
||||
|
||||
const frontmatter = content.slice(3, closingDelimiterIndex).trim();
|
||||
const metadata: { name?: string; description?: string } = {};
|
||||
for (const line of frontmatter.split(/\r?\n/)) {
|
||||
const separatorIndex = line.indexOf(':');
|
||||
if (separatorIndex <= 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const key = line.slice(0, separatorIndex).trim();
|
||||
const rawValue = line.slice(separatorIndex + 1).trim();
|
||||
const value = rawValue.replace(/^["']|["']$/g, '');
|
||||
if (key === 'name') {
|
||||
metadata.name = value;
|
||||
} else if (key === 'description') {
|
||||
metadata.description = value;
|
||||
}
|
||||
}
|
||||
|
||||
return metadata;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads Claude enabled plugin map from ~/.claude/settings.json.
|
||||
*/
|
||||
async function readClaudeEnabledPlugins(): Promise<string[]> {
|
||||
const settingsPath = path.join(os.homedir(), '.claude', 'settings.json');
|
||||
try {
|
||||
const settingsContent = await readFile(settingsPath, 'utf8');
|
||||
const settings = JSON.parse(settingsContent) as Record<string, unknown>;
|
||||
const enabledPlugins = settings.enabledPlugins;
|
||||
if (!enabledPlugins || typeof enabledPlugins !== 'object' || Array.isArray(enabledPlugins)) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const enabledRecords = enabledPlugins as Record<string, unknown>;
|
||||
return Object.entries(enabledRecords)
|
||||
.filter(([, enabled]) => enabled === true)
|
||||
.map(([pluginId]) => pluginId);
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
|
||||
return [];
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads Claude installed plugin index from ~/.claude/plugins/installed_plugins.json.
|
||||
*/
|
||||
async function readClaudeInstalledPluginIndex(): Promise<Record<string, unknown[]>> {
|
||||
const pluginIndexPath = path.join(os.homedir(), '.claude', 'plugins', 'installed_plugins.json');
|
||||
try {
|
||||
const indexContent = await readFile(pluginIndexPath, 'utf8');
|
||||
const index = JSON.parse(indexContent) as Record<string, unknown>;
|
||||
const plugins = index.plugins;
|
||||
if (!plugins || typeof plugins !== 'object' || Array.isArray(plugins)) {
|
||||
return {};
|
||||
}
|
||||
|
||||
const normalized: Record<string, unknown[]> = {};
|
||||
for (const [pluginId, entries] of Object.entries(plugins as Record<string, unknown>)) {
|
||||
normalized[pluginId] = Array.isArray(entries) ? entries : [];
|
||||
}
|
||||
|
||||
return normalized;
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
|
||||
return {};
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Finds the closest git root by walking up from the current workspace path.
|
||||
*/
|
||||
async function findGitRepoRoot(startPath: string): Promise<string | null> {
|
||||
let currentPath = path.resolve(startPath);
|
||||
while (true) {
|
||||
const gitPath = path.join(currentPath, '.git');
|
||||
if (await pathExists(gitPath)) {
|
||||
return currentPath;
|
||||
}
|
||||
|
||||
const parentPath = path.dirname(currentPath);
|
||||
if (parentPath === currentPath) {
|
||||
return null;
|
||||
}
|
||||
|
||||
currentPath = parentPath;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Deduplicates directory candidates by absolute path.
|
||||
*/
|
||||
function deduplicateDirectories(
|
||||
entries: Array<{ scope: SkillScope; directory: string }>,
|
||||
): Array<{ scope: SkillScope; directory: string }> {
|
||||
const seen = new Set<string>();
|
||||
const deduplicated: Array<{ scope: SkillScope; directory: string }> = [];
|
||||
for (const entry of entries) {
|
||||
const normalizedDirectory = path.resolve(entry.directory);
|
||||
if (seen.has(normalizedDirectory)) {
|
||||
continue;
|
||||
}
|
||||
seen.add(normalizedDirectory);
|
||||
deduplicated.push({ scope: entry.scope, directory: normalizedDirectory });
|
||||
}
|
||||
|
||||
return deduplicated;
|
||||
}
|
||||
|
||||
/**
|
||||
* Deduplicates skills by provider + invocation command.
|
||||
*/
|
||||
function deduplicateSkills(skills: UnifiedSkill[]): UnifiedSkill[] {
|
||||
const seen = new Set<string>();
|
||||
const deduplicated: UnifiedSkill[] = [];
|
||||
for (const skill of skills) {
|
||||
const key = `${skill.provider}:${skill.invocation}`;
|
||||
if (seen.has(key)) {
|
||||
continue;
|
||||
}
|
||||
seen.add(key);
|
||||
deduplicated.push(skill);
|
||||
}
|
||||
|
||||
return deduplicated;
|
||||
}
|
||||
|
||||
/**
|
||||
* Tests whether a path exists.
|
||||
*/
|
||||
async function pathExists(targetPath: string): Promise<boolean> {
|
||||
try {
|
||||
await access(targetPath);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user