mirror of
https://github.com/siteboon/claudecodeui.git
synced 2026-05-16 01:12:46 +00:00
refactor(backend): move every route to its own module
This commit is contained in:
1246
server/src/modules/agent/agent.routes.js
Normal file
1246
server/src/modules/agent/agent.routes.js
Normal file
File diff suppressed because it is too large
Load Diff
86
server/src/modules/api-keys/api-keys.routes.js
Normal file
86
server/src/modules/api-keys/api-keys.routes.js
Normal file
@@ -0,0 +1,86 @@
|
||||
import express from 'express';
|
||||
import { apiKeysDb } from '../../../database/db.js';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
// ===============================
// API Keys Management
// ===============================

// GET / — list the authenticated user's API keys.
// The stored key value is redacted to a short prefix so the list
// endpoint can never leak a full secret.
router.get('/', async (req, res) => {
  try {
    const keys = apiKeysDb.getApiKeys(req.user.id);
    const redactedKeys = keys.map((key) => ({
      ...key,
      api_key: key.api_key.substring(0, 10) + '...'
    }));
    res.json({ apiKeys: redactedKeys });
  } catch (error) {
    console.error('Error fetching API keys:', error);
    res.status(500).json({ error: 'Failed to fetch API keys' });
  }
});

// POST / — create a new API key for the authenticated user.
// Requires a non-blank `keyName` in the request body.
router.post('/', async (req, res) => {
  try {
    const { keyName } = req.body;
    if (!keyName || !keyName.trim()) {
      return res.status(400).json({ error: 'Key name is required' });
    }
    const createdKey = apiKeysDb.createApiKey(req.user.id, keyName.trim());
    res.json({ success: true, apiKey: createdKey });
  } catch (error) {
    console.error('Error creating API key:', error);
    res.status(500).json({ error: 'Failed to create API key' });
  }
});
|
||||
|
||||
// Delete an API key
|
||||
// DELETE /:keyId — delete one of the authenticated user's API keys.
router.delete('/:keyId', async (req, res) => {
  try {
    // Always pass a radix; also reject non-numeric ids up front instead of
    // forwarding NaN to the database layer (which would just report 404).
    const keyId = Number.parseInt(req.params.keyId, 10);
    if (Number.isNaN(keyId)) {
      return res.status(400).json({ error: 'keyId must be a number' });
    }

    const success = apiKeysDb.deleteApiKey(req.user.id, keyId);
    if (success) {
      res.json({ success: true });
    } else {
      res.status(404).json({ error: 'API key not found' });
    }
  } catch (error) {
    console.error('Error deleting API key:', error);
    res.status(500).json({ error: 'Failed to delete API key' });
  }
});

// PATCH /:keyId/toggle — enable or disable an API key without deleting it.
// Body must carry a boolean `isActive`.
router.patch('/:keyId/toggle', async (req, res) => {
  try {
    const { isActive } = req.body;
    if (typeof isActive !== 'boolean') {
      return res.status(400).json({ error: 'isActive must be a boolean' });
    }

    // Same validation as the delete route: explicit radix + NaN guard.
    const keyId = Number.parseInt(req.params.keyId, 10);
    if (Number.isNaN(keyId)) {
      return res.status(400).json({ error: 'keyId must be a number' });
    }

    const success = apiKeysDb.toggleApiKey(req.user.id, keyId, isActive);
    if (success) {
      res.json({ success: true });
    } else {
      res.status(404).json({ error: 'API key not found' });
    }
  } catch (error) {
    console.error('Error toggling API key:', error);
    res.status(500).json({ error: 'Failed to toggle API key' });
  }
});
|
||||
|
||||
export default router;
|
||||
434
server/src/modules/cli-auth/cli-auth.routes.js
Normal file
434
server/src/modules/cli-auth/cli-auth.routes.js
Normal file
@@ -0,0 +1,434 @@
|
||||
import express from 'express';
|
||||
import { spawn } from 'child_process';
|
||||
import fs from 'fs/promises';
|
||||
import path from 'path';
|
||||
import os from 'os';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
// GET /claude/status — report whether Claude credentials are available.
// Delegates detection to checkClaudeCredentials() and mirrors its result.
router.get('/claude/status', async (req, res) => {
  try {
    const credentials = await checkClaudeCredentials();

    if (credentials.authenticated) {
      res.json({
        authenticated: true,
        email: credentials.email || 'Authenticated',
        method: credentials.method // 'api_key' or 'credentials_file'
      });
      return;
    }

    res.json({
      authenticated: false,
      email: null,
      method: null,
      error: credentials.error || 'Not authenticated'
    });
  } catch (error) {
    console.error('Error checking Claude auth status:', error);
    res.status(500).json({
      authenticated: false,
      email: null,
      method: null,
      error: error.message
    });
  }
});
|
||||
|
||||
// The Cursor, Codex, and Gemini status endpoints have an identical shape:
// run the provider-specific credential check and report its outcome.
// Register them from a table instead of three copy-pasted handlers.
// (Function declarations are hoisted, so the checker references are safe here.)
const providerStatusChecks = [
  ['cursor', 'Cursor', checkCursorStatus],
  ['codex', 'Codex', checkCodexCredentials],
  ['gemini', 'Gemini', checkGeminiCredentials]
];

for (const [route, label, runCheck] of providerStatusChecks) {
  router.get(`/${route}/status`, async (req, res) => {
    try {
      const result = await runCheck();

      res.json({
        authenticated: result.authenticated,
        email: result.email,
        error: result.error
      });
    } catch (error) {
      console.error(`Error checking ${label} auth status:`, error);
      res.status(500).json({
        authenticated: false,
        email: null,
        error: error.message
      });
    }
  });
}
|
||||
|
||||
/**
 * Read the `env` map from ~/.claude/settings.json.
 *
 * @returns {Promise<Object>} The settings' env object, or {} when the file
 *   is missing, unparseable, or does not contain an object-valued `env`.
 */
async function loadClaudeSettingsEnv() {
  const settingsPath = path.join(os.homedir(), '.claude', 'settings.json');
  try {
    const parsed = JSON.parse(await fs.readFile(settingsPath, 'utf8'));
    if (parsed?.env && typeof parsed.env === 'object') {
      return parsed.env;
    }
  } catch (error) {
    // Missing or malformed settings: fall back to other auth sources.
  }
  return {};
}
|
||||
|
||||
/**
 * Determine whether Claude credentials are available, mirroring the
 * Claude Agent SDK's priority order:
 *
 *   1.  ANTHROPIC_API_KEY in the server process environment
 *   1b. ANTHROPIC_API_KEY / ANTHROPIC_AUTH_TOKEN persisted in
 *       ~/.claude/settings.json (read even when the server was started
 *       without those variables exported)
 *   2.  OAuth tokens in ~/.claude/.credentials.json (written by
 *       `claude /login` / `claude setup-token`)
 *
 * The SDK prefers environment-variable API keys over authenticated
 * subscriptions, so this check must too. See:
 * - https://support.claude.com/en/articles/12304248-managing-api-key-environment-variables-in-claude-code
 * - https://platform.claude.com/docs/en/agent-sdk/overview
 *
 * @returns {Promise<Object>} { authenticated, email, method } where method is
 *   'api_key' for env-var auth, 'credentials_file' for OAuth tokens, or null.
 */
async function checkClaudeCredentials() {
  const hasText = (value) => typeof value === 'string' && value.trim().length > 0;
  const apiKeyResult = { authenticated: true, email: 'API Key Auth', method: 'api_key' };
  const unauthenticated = { authenticated: false, email: null, method: null };

  // Priority 1: process environment. When set, API calls bill at
  // pay-as-you-go rates even if OAuth tokens also exist.
  if (hasText(process.env.ANTHROPIC_API_KEY)) {
    return apiKeyResult;
  }

  // Priority 1b: env values stored in ~/.claude/settings.json.
  const settingsEnv = await loadClaudeSettingsEnv();
  if (hasText(settingsEnv.ANTHROPIC_API_KEY)) {
    return apiKeyResult;
  }
  if (hasText(settingsEnv.ANTHROPIC_AUTH_TOKEN)) {
    return { authenticated: true, email: 'Configured via settings.json', method: 'api_key' };
  }

  // Priority 2: OAuth tokens from the credentials file.
  try {
    const credentialsPath = path.join(os.homedir(), '.claude', '.credentials.json');
    const credentials = JSON.parse(await fs.readFile(credentialsPath, 'utf8'));

    const oauth = credentials.claudeAiOauth;
    if (oauth?.accessToken) {
      const expired = oauth.expiresAt && Date.now() >= oauth.expiresAt;
      if (!expired) {
        return {
          authenticated: true,
          email: credentials.email || credentials.user || null,
          method: 'credentials_file'
        };
      }
    }

    return unauthenticated;
  } catch (error) {
    // Missing/unreadable credentials file means "not authenticated".
    return unauthenticated;
  }
}
|
||||
|
||||
/**
 * Check Cursor CLI authentication by running `cursor-agent status` and
 * parsing its stdout.
 *
 * The promise always resolves (never rejects); failure modes are encoded
 * in the result object. Exactly one of the three completion paths wins
 * (timeout, 'close', or 'error'), guarded by the `processCompleted` flag.
 *
 * @returns {Promise<Object>} { authenticated, email, output?/error? }
 */
function checkCursorStatus() {
  return new Promise((resolve) => {
    // Set once the promise has been resolved by any path, so the other
    // event handlers become no-ops.
    let processCompleted = false;

    // Hard 5s cap: kill the CLI and resolve as unauthenticated if it hangs.
    // `childProcess` is declared below but assigned before this callback
    // can ever fire, so referencing it here is safe.
    const timeout = setTimeout(() => {
      if (!processCompleted) {
        processCompleted = true;
        if (childProcess) {
          childProcess.kill();
        }
        resolve({
          authenticated: false,
          email: null,
          error: 'Command timeout'
        });
      }
    }, 5000);

    let childProcess;
    try {
      childProcess = spawn('cursor-agent', ['status']);
    } catch (err) {
      // spawn() itself threw synchronously (rare; most launch failures
      // arrive via the 'error' event instead).
      clearTimeout(timeout);
      processCompleted = true;
      resolve({
        authenticated: false,
        email: null,
        error: 'Cursor CLI not found or not installed'
      });
      return;
    }

    let stdout = '';
    let stderr = '';

    childProcess.stdout.on('data', (data) => {
      stdout += data.toString();
    });

    childProcess.stderr.on('data', (data) => {
      stderr += data.toString();
    });

    childProcess.on('close', (code) => {
      if (processCompleted) return;
      processCompleted = true;
      clearTimeout(timeout);

      if (code === 0) {
        // Try to pull the account email out of "Logged in as <email>".
        const emailMatch = stdout.match(/Logged in as ([a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,})/i);

        if (emailMatch) {
          resolve({
            authenticated: true,
            email: emailMatch[1],
            output: stdout
          });
        } else if (stdout.includes('Logged in')) {
          // Logged in, but the output format didn't expose an email.
          resolve({
            authenticated: true,
            email: 'Logged in',
            output: stdout
          });
        } else {
          resolve({
            authenticated: false,
            email: null,
            error: 'Not logged in'
          });
        }
      } else {
        // Non-zero exit: surface stderr when available.
        resolve({
          authenticated: false,
          email: null,
          error: stderr || 'Not logged in'
        });
      }
    });

    childProcess.on('error', (err) => {
      // Typically ENOENT when the binary is not on PATH.
      if (processCompleted) return;
      processCompleted = true;
      clearTimeout(timeout);

      resolve({
        authenticated: false,
        email: null,
        error: 'Cursor CLI not found or not installed'
      });
    });
  });
}
|
||||
|
||||
// Best-effort extraction of the account email from a JWT id_token.
// The payload (second base64url segment) may carry `email` or `user`;
// any decode failure falls back to the generic label.
function extractCodexEmail(idToken) {
  if (!idToken) {
    return 'Authenticated';
  }
  try {
    const segments = idToken.split('.');
    if (segments.length < 2) {
      return 'Authenticated';
    }
    const payload = JSON.parse(Buffer.from(segments[1], 'base64url').toString('utf8'));
    return payload.email || payload.user || 'Authenticated';
  } catch {
    return 'Authenticated';
  }
}

/**
 * Check Codex CLI authentication by inspecting ~/.codex/auth.json.
 *
 * Accepts either OAuth tokens (nested under `tokens`) or an
 * OPENAI_API_KEY entry stored directly in the file.
 *
 * @returns {Promise<Object>} { authenticated, email } on success,
 *   or { authenticated: false, email: null, error } otherwise.
 */
async function checkCodexCredentials() {
  try {
    const authPath = path.join(os.homedir(), '.codex', 'auth.json');
    const auth = JSON.parse(await fs.readFile(authPath, 'utf8'));

    const tokens = auth.tokens || {};
    if (tokens.id_token || tokens.access_token) {
      return {
        authenticated: true,
        email: extractCodexEmail(tokens.id_token)
      };
    }

    // Fallback auth method: a raw API key stored in the file.
    if (auth.OPENAI_API_KEY) {
      return {
        authenticated: true,
        email: 'API Key Auth'
      };
    }

    return {
      authenticated: false,
      email: null,
      error: 'No valid tokens found'
    };
  } catch (error) {
    const message = error.code === 'ENOENT' ? 'Codex not configured' : error.message;
    return {
      authenticated: false,
      email: null,
      error: message
    };
  }
}
|
||||
|
||||
/**
 * Check Gemini CLI authentication.
 *
 * Order of checks:
 *   1. GEMINI_API_KEY environment variable.
 *   2. OAuth tokens in ~/.gemini/oauth_creds.json, validated online against
 *      Google's tokeninfo endpoint when possible, with offline fallbacks to
 *      ~/.gemini/google_accounts.json for the account email.
 *
 * Note: a token that fails online validation is still reported as
 * authenticated when a refresh_token exists, on the assumption that the
 * Gemini CLI will refresh it on next use.
 *
 * @returns {Promise<Object>} { authenticated, email } or
 *   { authenticated: false, email: null, error }.
 */
async function checkGeminiCredentials() {
  // Priority 1: API key in the environment.
  if (process.env.GEMINI_API_KEY && process.env.GEMINI_API_KEY.trim()) {
    return {
      authenticated: true,
      email: 'API Key Auth'
    };
  }

  try {
    const credsPath = path.join(os.homedir(), '.gemini', 'oauth_creds.json');
    const content = await fs.readFile(credsPath, 'utf8');
    const creds = JSON.parse(content);

    if (creds.access_token) {
      let email = 'OAuth Session';

      try {
        // Validate token against Google API.
        // NOTE(review): the token is sent as a query parameter; consider an
        // Authorization header instead — confirm the endpoint supports it.
        const tokenRes = await fetch(`https://oauth2.googleapis.com/tokeninfo?access_token=${creds.access_token}`);
        if (tokenRes.ok) {
          const tokenInfo = await tokenRes.json();
          if (tokenInfo.email) {
            email = tokenInfo.email;
          }
        } else if (!creds.refresh_token) {
          // Token invalid and no refresh token available.
          return {
            authenticated: false,
            email: null,
            error: 'Access token invalid and no refresh token found'
          };
        } else {
          // Token might be expired but we have a refresh token, so CLI will
          // refresh it. Still try to report the active account's email.
          try {
            const accPath = path.join(os.homedir(), '.gemini', 'google_accounts.json');
            const accContent = await fs.readFile(accPath, 'utf8');
            const accounts = JSON.parse(accContent);
            if (accounts.active) {
              email = accounts.active;
            }
          } catch (e) { }
        }
      } catch (e) {
        // Network error: fall back to the local accounts file for the email.
        try {
          const accPath = path.join(os.homedir(), '.gemini', 'google_accounts.json');
          const accContent = await fs.readFile(accPath, 'utf8');
          const accounts = JSON.parse(accContent);
          if (accounts.active) {
            email = accounts.active;
          }
        } catch (err) { }
      }

      return {
        authenticated: true,
        email: email
      };
    }

    return {
      authenticated: false,
      email: null,
      error: 'No valid tokens found in oauth_creds'
    };
  } catch (error) {
    // Covers a missing/unreadable oauth_creds.json and JSON parse errors.
    return {
      authenticated: false,
      email: null,
      error: 'Gemini CLI not configured'
    };
  }
}
|
||||
|
||||
export default router;
|
||||
329
server/src/modules/codex/codex.routes.js
Normal file
329
server/src/modules/codex/codex.routes.js
Normal file
@@ -0,0 +1,329 @@
|
||||
import express from 'express';
|
||||
import { spawn } from 'child_process';
|
||||
import { promises as fs } from 'fs';
|
||||
import path from 'path';
|
||||
import os from 'os';
|
||||
import TOML from '@iarna/toml';
|
||||
import { getCodexSessions, deleteCodexSession } from '../../../projects.js';
|
||||
import { applyCustomSessionNames, sessionNamesDb } from '../../../database/db.js';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
/**
 * Build a respond(status, payload) function that writes a JSON response at
 * most once, and never after headers have already been sent. Used by the
 * CLI-backed routes, where 'close' and 'error' events can both fire.
 *
 * @param {Object} res - Express response object.
 * @returns {Function} (statusCode, payload) => void
 */
function createCliResponder(res) {
  let hasResponded = false;
  return (statusCode, payload) => {
    if (hasResponded || res.headersSent) {
      return;
    }
    hasResponded = true;
    res.status(statusCode).json(payload);
  };
}
|
||||
|
||||
// GET /config — surface a trimmed view of ~/.codex/config.toml.
// A missing config file is not an error; defaults are returned instead.
router.get('/config', async (req, res) => {
  try {
    const configPath = path.join(os.homedir(), '.codex', 'config.toml');
    const parsed = TOML.parse(await fs.readFile(configPath, 'utf8'));

    res.json({
      success: true,
      config: {
        model: parsed.model || null,
        mcpServers: parsed.mcp_servers || {},
        approvalMode: parsed.approval_mode || 'suggest'
      }
    });
  } catch (error) {
    if (error.code === 'ENOENT') {
      res.json({
        success: true,
        config: { model: null, mcpServers: {}, approvalMode: 'suggest' }
      });
    } else {
      console.error('Error reading Codex config:', error);
      res.status(500).json({ success: false, error: error.message });
    }
  }
});

// GET /sessions — list Codex sessions for a project, with any
// user-assigned custom names applied before returning.
router.get('/sessions', async (req, res) => {
  try {
    const { projectPath } = req.query;
    if (!projectPath) {
      return res.status(400).json({ success: false, error: 'projectPath query parameter required' });
    }

    const sessions = await getCodexSessions(projectPath);
    applyCustomSessionNames(sessions, 'codex');
    res.json({ success: true, sessions });
  } catch (error) {
    console.error('Error fetching Codex sessions:', error);
    res.status(500).json({ success: false, error: error.message });
  }
});

// DELETE /sessions/:sessionId — remove a session and its stored custom name.
router.delete('/sessions/:sessionId', async (req, res) => {
  try {
    const { sessionId } = req.params;
    await deleteCodexSession(sessionId);
    sessionNamesDb.deleteName(sessionId, 'codex');
    res.json({ success: true });
  } catch (error) {
    console.error(`Error deleting Codex session ${req.params.sessionId}:`, error);
    res.status(500).json({ success: false, error: error.message });
  }
});
|
||||
|
||||
// MCP Server Management Routes

/**
 * Run `codex <cliArgs...>` and answer the HTTP request from its outcome.
 *
 * Factors out the spawn/collect/respond wiring that was previously
 * duplicated verbatim across the four /mcp/cli/* handlers.
 *
 * @param {Object} res - Express response object.
 * @param {string[]} cliArgs - Arguments passed to the `codex` binary.
 * @param {Object} opts
 * @param {number} opts.failureStatus - HTTP status for a non-zero exit code.
 * @param {Function} opts.onSuccess - (stdout) => JSON payload for exit code 0.
 */
function runCodexCli(res, cliArgs, { failureStatus, onSuccess }) {
  const respond = createCliResponder(res);
  const proc = spawn('codex', cliArgs, { stdio: ['pipe', 'pipe', 'pipe'] });

  let stdout = '';
  let stderr = '';

  proc.stdout?.on('data', (data) => { stdout += data.toString(); });
  proc.stderr?.on('data', (data) => { stderr += data.toString(); });

  proc.on('close', (code) => {
    if (code === 0) {
      respond(200, onSuccess(stdout));
    } else {
      respond(failureStatus, { error: 'Codex CLI command failed', details: stderr || `Exited with code ${code}` });
    }
  });

  proc.on('error', (error) => {
    // ENOENT means the `codex` binary is not on PATH at all.
    const isMissing = error?.code === 'ENOENT';
    respond(isMissing ? 503 : 500, {
      error: isMissing ? 'Codex CLI not installed' : 'Failed to run Codex CLI',
      details: error.message,
      code: error.code
    });
  });
}

// GET /mcp/cli/list — list configured MCP servers via the Codex CLI.
router.get('/mcp/cli/list', async (req, res) => {
  try {
    runCodexCli(res, ['mcp', 'list'], {
      failureStatus: 500,
      onSuccess: (stdout) => ({ success: true, output: stdout, servers: parseCodexListOutput(stdout) })
    });
  } catch (error) {
    res.status(500).json({ error: 'Failed to list MCP servers', details: error.message });
  }
});

// POST /mcp/cli/add — register a new MCP server.
router.post('/mcp/cli/add', async (req, res) => {
  try {
    const { name, command, args = [], env = {} } = req.body;

    if (!name || !command) {
      return res.status(400).json({ error: 'name and command are required' });
    }

    // Build: codex mcp add <name> [-e KEY=VAL]... -- <command> [args...]
    const cliArgs = ['mcp', 'add', name];
    Object.entries(env).forEach(([key, value]) => {
      cliArgs.push('-e', `${key}=${value}`);
    });
    cliArgs.push('--', command);
    if (args && args.length > 0) {
      cliArgs.push(...args);
    }

    runCodexCli(res, cliArgs, {
      failureStatus: 400,
      onSuccess: (stdout) => ({ success: true, output: stdout, message: `MCP server "${name}" added successfully` })
    });
  } catch (error) {
    res.status(500).json({ error: 'Failed to add MCP server', details: error.message });
  }
});

// DELETE /mcp/cli/remove/:name — unregister an MCP server.
router.delete('/mcp/cli/remove/:name', async (req, res) => {
  try {
    const { name } = req.params;
    runCodexCli(res, ['mcp', 'remove', name], {
      failureStatus: 400,
      onSuccess: (stdout) => ({ success: true, output: stdout, message: `MCP server "${name}" removed successfully` })
    });
  } catch (error) {
    res.status(500).json({ error: 'Failed to remove MCP server', details: error.message });
  }
});

// GET /mcp/cli/get/:name — fetch details for one MCP server.
router.get('/mcp/cli/get/:name', async (req, res) => {
  try {
    const { name } = req.params;
    runCodexCli(res, ['mcp', 'get', name], {
      failureStatus: 404,
      onSuccess: (stdout) => ({ success: true, output: stdout, server: parseCodexGetOutput(stdout) })
    });
  } catch (error) {
    res.status(500).json({ error: 'Failed to get MCP server details', details: error.message });
  }
});
|
||||
|
||||
// GET /mcp/config/read — read MCP server definitions directly from
// ~/.codex/config.toml rather than via the CLI.
router.get('/mcp/config/read', async (req, res) => {
  try {
    const configPath = path.join(os.homedir(), '.codex', 'config.toml');

    let configData = null;
    try {
      configData = TOML.parse(await fs.readFile(configPath, 'utf8'));
    } catch (error) {
      // Config file doesn't exist (or is unreadable); treat as "no servers".
    }

    if (!configData) {
      return res.json({ success: true, configPath, servers: [] });
    }

    const servers = [];
    const mcpServers = configData.mcp_servers;
    if (mcpServers && typeof mcpServers === 'object') {
      for (const [name, config] of Object.entries(mcpServers)) {
        servers.push({
          id: name,
          name: name,
          type: 'stdio',
          scope: 'user',
          config: {
            command: config.command || '',
            args: config.args || [],
            env: config.env || {}
          },
          raw: config
        });
      }
    }

    res.json({ success: true, configPath, servers });
  } catch (error) {
    res.status(500).json({ error: 'Failed to read Codex configuration', details: error.message });
  }
});
|
||||
|
||||
/**
 * Parse the human-readable output of `codex mcp list` into server records.
 * Each "name: description [- ✓/✗ status]" line becomes one entry.
 *
 * @param {string} output - Raw stdout from the CLI.
 * @returns {Array<Object>} [{ name, type, status, description }, ...]
 */
function parseCodexListOutput(output) {
  const entries = [];

  for (const rawLine of output.split('\n')) {
    if (!rawLine.trim()) continue;

    const colonIndex = rawLine.indexOf(':');
    if (colonIndex === -1) continue;

    const name = rawLine.substring(0, colonIndex).trim();
    if (!name) continue;

    const remainder = rawLine.substring(colonIndex + 1).trim();
    let description = remainder;
    let status = 'unknown';

    // A trailing "- ✓ ..." / "- ✗ ..." segment carries connection state.
    if (remainder.includes('✓') || remainder.includes('✗')) {
      const statusMatch = remainder.match(/(.*?)\s*-\s*([✓✗].*)$/);
      if (statusMatch) {
        description = statusMatch[1].trim();
        status = statusMatch[2].includes('✓') ? 'connected' : 'failed';
      }
    }

    entries.push({ name, type: 'stdio', status, description });
  }

  return entries;
}

/**
 * Parse the output of `codex mcp get <name>`.
 * Prefers an embedded JSON object; otherwise falls back to scraping
 * "Name:", "Type:" and "Command:" lines.
 *
 * @param {string} output - Raw stdout from the CLI.
 * @returns {Object} Parsed server details, always carrying raw_output in
 *   the fallback path; { raw_output, parse_error } on failure.
 */
function parseCodexGetOutput(output) {
  try {
    const embeddedJson = output.match(/\{[\s\S]*\}/);
    if (embeddedJson) {
      return JSON.parse(embeddedJson[0]);
    }

    const server = { raw_output: output };
    for (const line of output.split('\n')) {
      if (line.includes('Name:')) {
        server.name = line.split(':')[1]?.trim();
      } else if (line.includes('Type:')) {
        server.type = line.split(':')[1]?.trim();
      } else if (line.includes('Command:')) {
        server.command = line.split(':')[1]?.trim();
      }
    }
    return server;
  } catch (error) {
    return { raw_output: output, parse_error: error.message };
  }
}
|
||||
|
||||
export default router;
|
||||
601
server/src/modules/commands/commands.routes.js
Normal file
601
server/src/modules/commands/commands.routes.js
Normal file
@@ -0,0 +1,601 @@
|
||||
import express from 'express';
|
||||
import { promises as fs } from 'fs';
|
||||
import path from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
import os from 'os';
|
||||
import { CLAUDE_MODELS, CURSOR_MODELS, CODEX_MODELS } from '../../../../shared/modelConstants.js';
|
||||
import { parseFrontmatter } from '../../../utils/frontmatter.js';
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
/**
 * Recursively scan a directory tree for command files (.md).
 *
 * Each markdown file becomes a command whose name is its path relative to
 * baseDir (slash-separated, no extension), prefixed with '/'. Frontmatter
 * is parsed via parseFrontmatter; the description falls back to the first
 * content line with any leading '#' heading markers stripped.
 *
 * A missing or inaccessible directory is treated as "no commands", not an
 * error. Individual unreadable files are logged and skipped.
 *
 * @param {string} dir - Directory to scan
 * @param {string} baseDir - Base directory for relative paths
 * @param {string} namespace - Namespace for commands (e.g., 'project', 'user')
 * @returns {Promise<Array>} Array of command objects
 */
async function scanCommandsDirectory(dir, baseDir, namespace) {
  const commands = [];

  try {
    // Check if directory exists
    await fs.access(dir);

    const entries = await fs.readdir(dir, { withFileTypes: true });

    for (const entry of entries) {
      const fullPath = path.join(dir, entry.name);

      if (entry.isDirectory()) {
        // Recursively scan subdirectories; all levels share one namespace.
        const subCommands = await scanCommandsDirectory(fullPath, baseDir, namespace);
        commands.push(...subCommands);
      } else if (entry.isFile() && entry.name.endsWith('.md')) {
        // Parse markdown file for metadata
        try {
          const content = await fs.readFile(fullPath, 'utf8');
          const { data: frontmatter, content: commandContent } = parseFrontmatter(content);

          // Calculate relative path from baseDir for command name
          const relativePath = path.relative(baseDir, fullPath);
          // Remove .md extension and convert to command name
          // (backslash replacement normalizes Windows path separators)
          const commandName = '/' + relativePath.replace(/\.md$/, '').replace(/\\/g, '/');

          // Extract description from frontmatter or first line of content
          let description = frontmatter.description || '';
          if (!description) {
            const firstLine = commandContent.trim().split('\n')[0];
            description = firstLine.replace(/^#+\s*/, '').trim();
          }

          commands.push({
            name: commandName,
            path: fullPath,
            relativePath,
            description,
            namespace,
            metadata: frontmatter
          });
        } catch (err) {
          console.error(`Error parsing command file ${fullPath}:`, err.message);
        }
      }
    }
  } catch (err) {
    // Directory doesn't exist or can't be accessed - this is okay
    if (err.code !== 'ENOENT' && err.code !== 'EACCES') {
      console.error(`Error scanning directory ${dir}:`, err.message);
    }
  }

  return commands;
}
|
||||
|
||||
/**
 * Built-in commands that are always available.
 * Built from a compact (name, description) table; every entry shares the
 * 'builtin' namespace and metadata.
 */
const builtInCommands = [
  ['/help', 'Show help documentation for Claude Code'],
  ['/clear', 'Clear the conversation history'],
  ['/model', 'Switch or view the current AI model'],
  ['/cost', 'Display token usage and cost information'],
  ['/memory', 'Open CLAUDE.md memory file for editing'],
  ['/config', 'Open settings and configuration'],
  ['/status', 'Show system status and version information'],
  ['/rewind', 'Rewind the conversation to a previous state']
].map(([name, description]) => ({
  name,
  description,
  namespace: 'builtin',
  metadata: { type: 'builtin' }
}));
|
||||
|
||||
/**
 * Built-in command handlers.
 * Each handler is `async (args, context) => { type: 'builtin', action, data }`.
 * `args` is the array of user-supplied arguments; `context` optionally carries
 * provider/model/projectPath/tokenUsage from the caller.
 */
const builtInHandlers = {
  '/help': async (args, context) => {
    const helpText = `# Claude Code Commands

## Built-in Commands

${builtInCommands.map(cmd => `### ${cmd.name}
${cmd.description}
`).join('\n')}

## Custom Commands

Custom commands can be created in:
- Project: \`.claude/commands/\` (project-specific)
- User: \`~/.claude/commands/\` (available in all projects)

### Command Syntax

- **Arguments**: Use \`$ARGUMENTS\` for all args or \`$1\`, \`$2\`, etc. for positional
- **File Includes**: Use \`@filename\` to include file contents
- **Bash Commands**: Use \`!command\` to execute bash commands

### Examples

\`\`\`markdown
/mycommand arg1 arg2
\`\`\`
`;

    return {
      type: 'builtin',
      action: 'help',
      data: {
        content: helpText,
        format: 'markdown'
      }
    };
  },

  '/clear': async (args, context) => {
    return {
      type: 'builtin',
      action: 'clear',
      data: {
        message: 'Conversation history cleared'
      }
    };
  },

  '/model': async (args, context) => {
    // Read available models from centralized constants
    const availableModels = {
      claude: CLAUDE_MODELS.OPTIONS.map(o => o.value),
      cursor: CURSOR_MODELS.OPTIONS.map(o => o.value),
      codex: CODEX_MODELS.OPTIONS.map(o => o.value)
    };

    const currentProvider = context?.provider || 'claude';
    const currentModel = context?.model || CLAUDE_MODELS.DEFAULT;

    return {
      type: 'builtin',
      action: 'model',
      data: {
        current: {
          provider: currentProvider,
          model: currentModel
        },
        available: availableModels,
        message: args.length > 0
          ? `Switching to model: ${args[0]}`
          : `Current model: ${currentModel}`
      }
    };
  },

  '/cost': async (args, context) => {
    const tokenUsage = context?.tokenUsage || {};
    const provider = context?.provider || 'claude';
    const model =
      context?.model ||
      (provider === 'cursor'
        ? CURSOR_MODELS.DEFAULT
        : provider === 'codex'
          ? CODEX_MODELS.DEFAULT
          : CLAUDE_MODELS.DEFAULT);

    // Token usage may arrive under several different key names depending on
    // the provider; coalesce the known variants and coerce to numbers.
    const used = Number(tokenUsage.used ?? tokenUsage.totalUsed ?? tokenUsage.total_tokens ?? 0) || 0;
    const total =
      Number(
        tokenUsage.total ??
        tokenUsage.contextWindow ??
        Number.parseInt(process.env.CONTEXT_WINDOW || '160000', 10),
      ) || 160000;
    const percentage = total > 0 ? Number(((used / total) * 100).toFixed(1)) : 0;

    const inputTokensRaw =
      Number(
        tokenUsage.inputTokens ??
        tokenUsage.input ??
        tokenUsage.cumulativeInputTokens ??
        tokenUsage.promptTokens ??
        0,
      ) || 0;
    const outputTokens =
      Number(
        tokenUsage.outputTokens ??
        tokenUsage.output ??
        tokenUsage.cumulativeOutputTokens ??
        tokenUsage.completionTokens ??
        0,
      ) || 0;
    const cacheTokens =
      Number(
        tokenUsage.cacheReadTokens ??
        tokenUsage.cacheCreationTokens ??
        tokenUsage.cacheTokens ??
        tokenUsage.cachedTokens ??
        0,
      ) || 0;

    // If we only have total used tokens, treat them as input for display/estimation.
    const inputTokens =
      inputTokensRaw > 0 || outputTokens > 0 || cacheTokens > 0 ? inputTokensRaw + cacheTokens : used;

    // Rough default rates by provider (USD / 1M tokens).
    const pricingByProvider = {
      claude: { input: 3, output: 15 },
      cursor: { input: 3, output: 15 },
      codex: { input: 1.5, output: 6 },
    };
    const rates = pricingByProvider[provider] || pricingByProvider.claude;

    const inputCost = (inputTokens / 1_000_000) * rates.input;
    const outputCost = (outputTokens / 1_000_000) * rates.output;
    const totalCost = inputCost + outputCost;

    return {
      type: 'builtin',
      action: 'cost',
      data: {
        tokenUsage: {
          used,
          total,
          percentage,
        },
        cost: {
          input: inputCost.toFixed(4),
          output: outputCost.toFixed(4),
          total: totalCost.toFixed(4),
        },
        model,
      },
    };
  },

  '/status': async (args, context) => {
    // Read version from package.json.
    // NOTE(review): __dirname is not defined in native ES modules unless this
    // file derives it at module scope (e.g. via fileURLToPath) — confirm.
    const packageJsonPath = path.join(path.dirname(__dirname), '..', 'package.json');
    let version = 'unknown';
    let packageName = 'claude-code-ui';

    try {
      const packageJson = JSON.parse(await fs.readFile(packageJsonPath, 'utf8'));
      version = packageJson.version;
      packageName = packageJson.name;
    } catch (err) {
      console.error('Error reading package.json:', err);
    }

    const uptime = process.uptime();
    const uptimeMinutes = Math.floor(uptime / 60);
    const uptimeHours = Math.floor(uptimeMinutes / 60);
    const uptimeFormatted = uptimeHours > 0
      ? `${uptimeHours}h ${uptimeMinutes % 60}m`
      : `${uptimeMinutes}m`;

    return {
      type: 'builtin',
      action: 'status',
      data: {
        version,
        packageName,
        uptime: uptimeFormatted,
        uptimeSeconds: Math.floor(uptime),
        // Use the centralized default instead of a hard-coded model string,
        // keeping /status consistent with /model and /cost.
        model: context?.model || CLAUDE_MODELS.DEFAULT,
        provider: context?.provider || 'claude',
        nodeVersion: process.version,
        platform: process.platform
      }
    };
  },

  '/memory': async (args, context) => {
    const projectPath = context?.projectPath;

    if (!projectPath) {
      return {
        type: 'builtin',
        action: 'memory',
        data: {
          error: 'No project selected',
          message: 'Please select a project to access its CLAUDE.md file'
        }
      };
    }

    const claudeMdPath = path.join(projectPath, 'CLAUDE.md');

    // Check if CLAUDE.md exists
    let exists = false;
    try {
      await fs.access(claudeMdPath);
      exists = true;
    } catch (err) {
      // File doesn't exist
    }

    return {
      type: 'builtin',
      action: 'memory',
      data: {
        path: claudeMdPath,
        exists,
        message: exists
          ? `Opening CLAUDE.md at ${claudeMdPath}`
          : `CLAUDE.md not found at ${claudeMdPath}. Create it to store project-specific instructions.`
      }
    };
  },

  '/config': async (args, context) => {
    return {
      type: 'builtin',
      action: 'config',
      data: {
        message: 'Opening settings...'
      }
    };
  },

  '/rewind': async (args, context) => {
    // Always parse with an explicit radix; bare parseInt would treat
    // "0x.."-style input inconsistently.
    const steps = args[0] ? Number.parseInt(args[0], 10) : 1;

    if (isNaN(steps) || steps < 1) {
      return {
        type: 'builtin',
        action: 'rewind',
        data: {
          error: 'Invalid steps parameter',
          message: 'Usage: /rewind [number] - Rewind conversation by N steps (default: 1)'
        }
      };
    }

    return {
      type: 'builtin',
      action: 'rewind',
      data: {
        steps,
        message: `Rewinding conversation by ${steps} step${steps > 1 ? 's' : ''}...`
      }
    };
  }
};
|
||||
|
||||
/**
 * POST /api/commands/list
 * List all available commands: built-ins plus custom commands discovered in
 * the project's `.claude/commands/` and the user's `~/.claude/commands/`.
 */
router.post('/list', async (req, res) => {
  try {
    const { projectPath } = req.body;

    // Gather custom commands from both scan locations
    const discovered = [];

    if (projectPath) {
      const projectDir = path.join(projectPath, '.claude', 'commands');
      discovered.push(...(await scanCommandsDirectory(projectDir, projectDir, 'project')));
    }

    const userDir = path.join(os.homedir(), '.claude', 'commands');
    discovered.push(...(await scanCommandsDirectory(userDir, userDir, 'user')));

    // Custom commands are everything that is not a built-in, sorted by name
    const customCommands = discovered
      .filter((cmd) => cmd.namespace !== 'builtin')
      .sort((a, b) => a.name.localeCompare(b.name));

    res.json({
      builtIn: builtInCommands,
      custom: customCommands,
      count: builtInCommands.length + discovered.length
    });
  } catch (error) {
    console.error('Error listing commands:', error);
    res.status(500).json({
      error: 'Failed to list commands',
      message: error.message
    });
  }
});
|
||||
|
||||
/**
 * POST /api/commands/load
 * Load a specific command file and return its content and metadata.
 *
 * Body: { commandPath } — absolute path to the command's markdown file.
 * Responds with { path, metadata, content }, or 400/403/404/500 on error.
 */
router.post('/load', async (req, res) => {
  try {
    const { commandPath } = req.body;

    if (!commandPath) {
      return res.status(400).json({
        error: 'Command path is required'
      });
    }

    // Security: only allow files that actually live inside a
    // ".claude/commands" directory (project-level or user-level).
    // Checking real path segments — rather than "starts with home dir" or a
    // substring match — prevents reading arbitrary files such as ~/.ssh/*.
    const resolvedPath = path.resolve(commandPath);
    const segments = resolvedPath.split(path.sep);
    const insideCommandsDir = segments.some(
      (segment, i) => segment === '.claude' && segments[i + 1] === 'commands'
    );
    if (!insideCommandsDir) {
      return res.status(403).json({
        error: 'Access denied',
        message: 'Command must be in .claude/commands directory'
      });
    }

    // Read and parse the command file (frontmatter metadata + body)
    const content = await fs.readFile(commandPath, 'utf8');
    const { data: metadata, content: commandContent } = parseFrontmatter(content);

    res.json({
      path: commandPath,
      metadata,
      content: commandContent
    });
  } catch (error) {
    if (error.code === 'ENOENT') {
      return res.status(404).json({
        error: 'Command not found',
        message: `Command file not found: ${req.body.commandPath}`
      });
    }

    console.error('Error loading command:', error);
    res.status(500).json({
      error: 'Failed to load command',
      message: error.message
    });
  }
});
|
||||
|
||||
/**
 * POST /api/commands/execute
 * Execute a command with argument replacement.
 * Built-in commands are dispatched to their handlers; custom commands have
 * their file loaded and $ARGUMENTS / $1..$N placeholders substituted.
 * This endpoint prepares the command content but doesn't execute bash commands yet
 * (that will be handled in the command parser utility).
 */
router.post('/execute', async (req, res) => {
  try {
    const { commandName, commandPath, args = [], context = {} } = req.body;

    if (!commandName) {
      return res.status(400).json({
        error: 'Command name is required'
      });
    }

    // Handle built-in commands
    const handler = builtInHandlers[commandName];
    if (handler) {
      try {
        const result = await handler(args, context);
        return res.json({
          ...result,
          command: commandName
        });
      } catch (error) {
        console.error(`Error executing built-in command ${commandName}:`, error);
        return res.status(500).json({
          error: 'Command execution failed',
          message: error.message,
          command: commandName
        });
      }
    }

    // Handle custom commands
    if (!commandPath) {
      return res.status(400).json({
        error: 'Command path is required for custom commands'
      });
    }

    // Security: validate commandPath is within allowed directories
    // (user-level ~/.claude/commands or the project's .claude/commands)
    {
      const resolvedPath = path.resolve(commandPath);
      const userBase = path.resolve(path.join(os.homedir(), '.claude', 'commands'));
      const projectBase = context?.projectPath
        ? path.resolve(path.join(context.projectPath, '.claude', 'commands'))
        : null;
      const isUnder = (base) => {
        const rel = path.relative(base, resolvedPath);
        return rel !== '' && !rel.startsWith('..') && !path.isAbsolute(rel);
      };
      if (!(isUnder(userBase) || (projectBase && isUnder(projectBase)))) {
        return res.status(403).json({
          error: 'Access denied',
          message: 'Command must be in .claude/commands directory'
        });
      }
    }

    // Load command content
    const content = await fs.readFile(commandPath, 'utf8');
    const { data: metadata, content: commandContent } = parseFrontmatter(content);

    // Basic argument replacement (will be enhanced in command parser utility)
    let processedContent = commandContent;

    // Replace $ARGUMENTS with all arguments joined.
    // A function replacement is used so "$"-sequences inside user-supplied
    // arguments (e.g. "$&", "$$") are inserted literally instead of being
    // interpreted as regex replacement patterns.
    const argsString = args.join(' ');
    processedContent = processedContent.replace(/\$ARGUMENTS/g, () => argsString);

    // Replace $1, $2, etc. with positional arguments (same literal-insert rule)
    args.forEach((arg, index) => {
      const placeholder = `$${index + 1}`;
      processedContent = processedContent.replace(new RegExp(`\\${placeholder}\\b`, 'g'), () => arg);
    });

    res.json({
      type: 'custom',
      command: commandName,
      content: processedContent,
      metadata,
      // Crude heuristics: presence of "@" / "!" anywhere in the content
      hasFileIncludes: processedContent.includes('@'),
      hasBashCommands: processedContent.includes('!')
    });
  } catch (error) {
    if (error.code === 'ENOENT') {
      return res.status(404).json({
        error: 'Command not found',
        message: `Command file not found: ${req.body.commandPath}`
      });
    }

    console.error('Error executing command:', error);
    res.status(500).json({
      error: 'Failed to execute command',
      message: error.message
    });
  }
});
|
||||
|
||||
export default router;
|
||||
98
server/src/modules/credentials/credentials.routes.js
Normal file
98
server/src/modules/credentials/credentials.routes.js
Normal file
@@ -0,0 +1,98 @@
|
||||
import express from 'express';
|
||||
import { credentialsDb } from '../../../database/db.js';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
// ===============================
|
||||
// Generic Credentials Management
|
||||
// ===============================
|
||||
|
||||
// Get all credentials for the authenticated user (optionally filtered by type)
router.get('/', async (req, res) => {
  try {
    const typeFilter = req.query.type || null;
    // NOTE(review): rows are returned as-is from the db layer — presumably
    // getCredentials already omits the secret values; confirm in database/db.js.
    const credentials = credentialsDb.getCredentials(req.user.id, typeFilter);
    res.json({ credentials });
  } catch (error) {
    console.error('Error fetching credentials:', error);
    res.status(500).json({ error: 'Failed to fetch credentials' });
  }
});
|
||||
|
||||
// Create a new credential
router.post('/', async (req, res) => {
  try {
    const { credentialName, credentialType, credentialValue, description } = req.body;

    // Validate the required string fields, reporting the first missing one
    // (same order and messages as the individual checks they replace)
    const requiredFields = [
      [credentialName, 'Credential name is required'],
      [credentialType, 'Credential type is required'],
      [credentialValue, 'Credential value is required']
    ];
    for (const [value, message] of requiredFields) {
      if (!value || !value.trim()) {
        return res.status(400).json({ error: message });
      }
    }

    const credential = credentialsDb.createCredential(
      req.user.id,
      credentialName.trim(),
      credentialType.trim(),
      credentialValue.trim(),
      description?.trim() || null
    );

    res.json({
      success: true,
      credential
    });
  } catch (error) {
    console.error('Error creating credential:', error);
    res.status(500).json({ error: 'Failed to create credential' });
  }
});
|
||||
|
||||
// Delete a credential
router.delete('/:credentialId', async (req, res) => {
  try {
    // Route params are strings; parse with an explicit radix and reject
    // non-numeric ids instead of passing NaN to the database layer.
    const credentialId = Number.parseInt(req.params.credentialId, 10);
    if (Number.isNaN(credentialId)) {
      return res.status(400).json({ error: 'Invalid credential id' });
    }

    const success = credentialsDb.deleteCredential(req.user.id, credentialId);

    if (success) {
      res.json({ success: true });
    } else {
      res.status(404).json({ error: 'Credential not found' });
    }
  } catch (error) {
    console.error('Error deleting credential:', error);
    res.status(500).json({ error: 'Failed to delete credential' });
  }
});
|
||||
|
||||
// Toggle credential active status
router.patch('/:credentialId/toggle', async (req, res) => {
  try {
    const { isActive } = req.body;

    if (typeof isActive !== 'boolean') {
      return res.status(400).json({ error: 'isActive must be a boolean' });
    }

    // Route params are strings; parse with an explicit radix and reject
    // non-numeric ids instead of passing NaN to the database layer.
    const credentialId = Number.parseInt(req.params.credentialId, 10);
    if (Number.isNaN(credentialId)) {
      return res.status(400).json({ error: 'Invalid credential id' });
    }

    const success = credentialsDb.toggleCredential(req.user.id, credentialId, isActive);

    if (success) {
      res.json({ success: true });
    } else {
      res.status(404).json({ error: 'Credential not found' });
    }
  } catch (error) {
    console.error('Error toggling credential:', error);
    res.status(500).json({ error: 'Failed to toggle credential' });
  }
});
|
||||
|
||||
export default router;
|
||||
798
server/src/modules/cursor/cursor.routes.js
Normal file
798
server/src/modules/cursor/cursor.routes.js
Normal file
@@ -0,0 +1,798 @@
|
||||
import express from 'express';
|
||||
import { promises as fs } from 'fs';
|
||||
import path from 'path';
|
||||
import os from 'os';
|
||||
import { spawn } from 'child_process';
|
||||
import sqlite3 from 'sqlite3';
|
||||
import { open } from 'sqlite';
|
||||
import crypto from 'crypto';
|
||||
import { CURSOR_MODELS } from '../../../../shared/modelConstants.js';
|
||||
import { applyCustomSessionNames } from '../../../database/db.js';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
// GET /api/cursor/config - Read Cursor CLI configuration
router.get('/config', async (req, res) => {
  try {
    const configPath = path.join(os.homedir(), '.cursor', 'cli-config.json');

    let config;
    try {
      config = JSON.parse(await fs.readFile(configPath, 'utf8'));
    } catch (error) {
      // Missing or unparseable file — answer with an in-memory default
      console.log('Cursor config not found or invalid:', error.message);
      return res.json({
        success: true,
        config: {
          version: 1,
          model: {
            modelId: CURSOR_MODELS.DEFAULT,
            displayName: "GPT-5"
          },
          permissions: {
            allow: [],
            deny: []
          }
        },
        isDefault: true
      });
    }

    res.json({
      success: true,
      config: config,
      path: configPath
    });
  } catch (error) {
    console.error('Error reading Cursor config:', error);
    res.status(500).json({
      error: 'Failed to read Cursor configuration',
      details: error.message
    });
  }
});
|
||||
|
||||
// POST /api/cursor/config - Update Cursor CLI configuration
router.post('/config', async (req, res) => {
  try {
    const { permissions, model } = req.body;
    const configPath = path.join(os.homedir(), '.cursor', 'cli-config.json');

    // Start from a fresh default; it is replaced wholesale by the on-disk
    // config when one exists.
    let config = {
      version: 1,
      editor: {
        vimMode: false
      },
      hasChangedDefaultModel: false,
      privacyCache: {
        ghostMode: false,
        privacyMode: 3,
        updatedAt: Date.now()
      }
    };

    try {
      config = JSON.parse(await fs.readFile(configPath, 'utf8'));
    } catch (error) {
      console.log('Creating new Cursor config');
    }

    // Apply only the sections the caller actually provided
    if (permissions) {
      config.permissions = {
        allow: permissions.allow || [],
        deny: permissions.deny || []
      };
    }
    if (model) {
      config.model = model;
      config.hasChangedDefaultModel = true;
    }

    // Persist, creating ~/.cursor on first write
    await fs.mkdir(path.dirname(configPath), { recursive: true });
    await fs.writeFile(configPath, JSON.stringify(config, null, 2));

    res.json({
      success: true,
      config: config,
      message: 'Cursor configuration updated successfully'
    });
  } catch (error) {
    console.error('Error updating Cursor config:', error);
    res.status(500).json({
      error: 'Failed to update Cursor configuration',
      details: error.message
    });
  }
});
|
||||
|
||||
// GET /api/cursor/mcp - Read Cursor MCP servers configuration
router.get('/mcp', async (req, res) => {
  try {
    const mcpPath = path.join(os.homedir(), '.cursor', 'mcp.json');

    try {
      const mcpConfig = JSON.parse(await fs.readFile(mcpPath, 'utf8'));

      // Convert the { name: config } map into a UI-friendly list
      const entries =
        mcpConfig.mcpServers && typeof mcpConfig.mcpServers === 'object'
          ? Object.entries(mcpConfig.mcpServers)
          : [];

      const servers = entries.map(([name, config]) => {
        const server = {
          id: name,
          name: name,
          type: 'stdio',
          scope: 'cursor',
          config: {},
          raw: config
        };

        if (config.command) {
          // stdio transport: launched as a child process
          server.type = 'stdio';
          server.config.command = config.command;
          server.config.args = config.args || [];
          server.config.env = config.env || {};
        } else if (config.url) {
          // remote transport (http or sse)
          server.type = config.transport || 'http';
          server.config.url = config.url;
          server.config.headers = config.headers || {};
        }

        return server;
      });

      res.json({
        success: true,
        servers: servers,
        path: mcpPath
      });
    } catch (error) {
      // Missing/unreadable config (or a malformed entry) — report no servers
      console.log('Cursor MCP config not found:', error.message);
      res.json({
        success: true,
        servers: [],
        isDefault: true
      });
    }
  } catch (error) {
    console.error('Error reading Cursor MCP config:', error);
    res.status(500).json({
      error: 'Failed to read Cursor MCP configuration',
      details: error.message
    });
  }
});
|
||||
|
||||
// POST /api/cursor/mcp/add - Add MCP server to Cursor configuration
router.post('/mcp/add', async (req, res) => {
  try {
    const { name, type = 'stdio', command, args = [], url, headers = {}, env = {} } = req.body;
    const mcpPath = path.join(os.homedir(), '.cursor', 'mcp.json');

    // Validate up front instead of silently writing an empty/unusable entry
    if (!name || typeof name !== 'string' || !name.trim()) {
      return res.status(400).json({ error: 'Server name is required' });
    }
    // Reject keys that would tamper with the object prototype when assigned
    if (['__proto__', 'constructor', 'prototype'].includes(name)) {
      return res.status(400).json({ error: 'Invalid server name' });
    }
    if (type === 'stdio' && !command) {
      return res.status(400).json({ error: 'Command is required for stdio MCP servers' });
    }
    if ((type === 'http' || type === 'sse') && !url) {
      return res.status(400).json({ error: 'URL is required for http/sse MCP servers' });
    }

    console.log(`➕ Adding MCP server to Cursor config: ${name}`);

    // Read existing config or create new
    let mcpConfig = { mcpServers: {} };
    try {
      const existing = await fs.readFile(mcpPath, 'utf8');
      mcpConfig = JSON.parse(existing);
      if (!mcpConfig.mcpServers) {
        mcpConfig.mcpServers = {};
      }
    } catch (error) {
      console.log('Creating new Cursor MCP config');
    }

    // Build server config based on transport type
    let serverConfig = {};
    if (type === 'stdio') {
      serverConfig = {
        command: command,
        args: args,
        env: env
      };
    } else if (type === 'http' || type === 'sse') {
      serverConfig = {
        url: url,
        transport: type,
        headers: headers
      };
    }

    // Add server to config
    mcpConfig.mcpServers[name] = serverConfig;

    // Ensure directory exists, then persist
    await fs.mkdir(path.dirname(mcpPath), { recursive: true });
    await fs.writeFile(mcpPath, JSON.stringify(mcpConfig, null, 2));

    res.json({
      success: true,
      message: `MCP server "${name}" added to Cursor configuration`,
      config: mcpConfig
    });
  } catch (error) {
    console.error('Error adding MCP server to Cursor:', error);
    res.status(500).json({
      error: 'Failed to add MCP server',
      details: error.message
    });
  }
});
|
||||
|
||||
// DELETE /api/cursor/mcp/:name - Remove MCP server from Cursor configuration
router.delete('/mcp/:name', async (req, res) => {
  try {
    const { name } = req.params;
    const mcpPath = path.join(os.homedir(), '.cursor', 'mcp.json');

    console.log(`🗑️ Removing MCP server from Cursor config: ${name}`);

    // Load the existing configuration; without one there is nothing to remove
    let mcpConfig;
    try {
      mcpConfig = JSON.parse(await fs.readFile(mcpPath, 'utf8'));
    } catch (error) {
      return res.status(404).json({
        error: 'Cursor MCP configuration not found'
      });
    }

    // The named server must exist before we can delete it
    if (!mcpConfig.mcpServers || !mcpConfig.mcpServers[name]) {
      return res.status(404).json({
        error: `MCP server "${name}" not found in Cursor configuration`
      });
    }

    delete mcpConfig.mcpServers[name];

    await fs.writeFile(mcpPath, JSON.stringify(mcpConfig, null, 2));

    res.json({
      success: true,
      message: `MCP server "${name}" removed from Cursor configuration`,
      config: mcpConfig
    });
  } catch (error) {
    console.error('Error removing MCP server from Cursor:', error);
    res.status(500).json({
      error: 'Failed to remove MCP server',
      details: error.message
    });
  }
});
|
||||
|
||||
// POST /api/cursor/mcp/add-json - Add MCP server using JSON format
router.post('/mcp/add-json', async (req, res) => {
  try {
    const { name, jsonConfig } = req.body;
    const mcpPath = path.join(os.homedir(), '.cursor', 'mcp.json');

    // Validate the server name before touching the config file
    if (!name || typeof name !== 'string' || !name.trim()) {
      return res.status(400).json({ error: 'Server name is required' });
    }
    // Reject keys that would tamper with the object prototype when assigned
    if (['__proto__', 'constructor', 'prototype'].includes(name)) {
      return res.status(400).json({ error: 'Invalid server name' });
    }

    console.log(`➕ Adding MCP server to Cursor config via JSON: ${name}`);

    // Validate and parse JSON config
    let parsedConfig;
    try {
      parsedConfig = typeof jsonConfig === 'string' ? JSON.parse(jsonConfig) : jsonConfig;
    } catch (parseError) {
      return res.status(400).json({
        error: 'Invalid JSON configuration',
        details: parseError.message
      });
    }

    // The config must be a plain object describing a single server
    if (!parsedConfig || typeof parsedConfig !== 'object' || Array.isArray(parsedConfig)) {
      return res.status(400).json({
        error: 'Invalid JSON configuration',
        details: 'Configuration must be a JSON object'
      });
    }

    // Read existing config or create new
    let mcpConfig = { mcpServers: {} };
    try {
      const existing = await fs.readFile(mcpPath, 'utf8');
      mcpConfig = JSON.parse(existing);
      if (!mcpConfig.mcpServers) {
        mcpConfig.mcpServers = {};
      }
    } catch (error) {
      console.log('Creating new Cursor MCP config');
    }

    // Add server to config
    mcpConfig.mcpServers[name] = parsedConfig;

    // Ensure directory exists, then persist
    await fs.mkdir(path.dirname(mcpPath), { recursive: true });
    await fs.writeFile(mcpPath, JSON.stringify(mcpConfig, null, 2));

    res.json({
      success: true,
      message: `MCP server "${name}" added to Cursor configuration via JSON`,
      config: mcpConfig
    });
  } catch (error) {
    console.error('Error adding MCP server to Cursor via JSON:', error);
    res.status(500).json({
      error: 'Failed to add MCP server',
      details: error.message
    });
  }
});
|
||||
|
||||
// GET /api/cursor/sessions - Get Cursor sessions from SQLite database
|
||||
router.get('/sessions', async (req, res) => {
|
||||
try {
|
||||
const { projectPath } = req.query;
|
||||
|
||||
// Calculate cwdID hash for the project path (Cursor uses MD5 hash)
|
||||
const cwdId = crypto.createHash('md5').update(projectPath || process.cwd()).digest('hex');
|
||||
const cursorChatsPath = path.join(os.homedir(), '.cursor', 'chats', cwdId);
|
||||
|
||||
|
||||
// Check if the directory exists
|
||||
try {
|
||||
await fs.access(cursorChatsPath);
|
||||
} catch (error) {
|
||||
// No sessions for this project
|
||||
return res.json({
|
||||
success: true,
|
||||
sessions: [],
|
||||
cwdId: cwdId,
|
||||
path: cursorChatsPath
|
||||
});
|
||||
}
|
||||
|
||||
// List all session directories
|
||||
const sessionDirs = await fs.readdir(cursorChatsPath);
|
||||
const sessions = [];
|
||||
|
||||
for (const sessionId of sessionDirs) {
|
||||
const sessionPath = path.join(cursorChatsPath, sessionId);
|
||||
const storeDbPath = path.join(sessionPath, 'store.db');
|
||||
let dbStatMtimeMs = null;
|
||||
|
||||
try {
|
||||
// Check if store.db exists
|
||||
await fs.access(storeDbPath);
|
||||
|
||||
// Capture store.db mtime as a reliable fallback timestamp (last activity)
|
||||
try {
|
||||
const stat = await fs.stat(storeDbPath);
|
||||
dbStatMtimeMs = stat.mtimeMs;
|
||||
} catch (_) {}
|
||||
|
||||
// Open SQLite database
|
||||
const db = await open({
|
||||
filename: storeDbPath,
|
||||
driver: sqlite3.Database,
|
||||
mode: sqlite3.OPEN_READONLY
|
||||
});
|
||||
|
||||
// Get metadata from meta table
|
||||
const metaRows = await db.all(`
|
||||
SELECT key, value FROM meta
|
||||
`);
|
||||
|
||||
let sessionData = {
|
||||
id: sessionId,
|
||||
name: 'Untitled Session',
|
||||
createdAt: null,
|
||||
mode: null,
|
||||
projectPath: projectPath,
|
||||
lastMessage: null,
|
||||
messageCount: 0
|
||||
};
|
||||
|
||||
// Parse meta table entries
|
||||
for (const row of metaRows) {
|
||||
if (row.value) {
|
||||
try {
|
||||
// Try to decode as hex-encoded JSON
|
||||
const hexMatch = row.value.toString().match(/^[0-9a-fA-F]+$/);
|
||||
if (hexMatch) {
|
||||
const jsonStr = Buffer.from(row.value, 'hex').toString('utf8');
|
||||
const data = JSON.parse(jsonStr);
|
||||
|
||||
if (row.key === 'agent') {
|
||||
sessionData.name = data.name || sessionData.name;
|
||||
// Normalize createdAt to ISO string in milliseconds
|
||||
let createdAt = data.createdAt;
|
||||
if (typeof createdAt === 'number') {
|
||||
if (createdAt < 1e12) {
|
||||
createdAt = createdAt * 1000; // seconds -> ms
|
||||
}
|
||||
sessionData.createdAt = new Date(createdAt).toISOString();
|
||||
} else if (typeof createdAt === 'string') {
|
||||
const n = Number(createdAt);
|
||||
if (!Number.isNaN(n)) {
|
||||
const ms = n < 1e12 ? n * 1000 : n;
|
||||
sessionData.createdAt = new Date(ms).toISOString();
|
||||
} else {
|
||||
// Assume it's already an ISO/date string
|
||||
const d = new Date(createdAt);
|
||||
sessionData.createdAt = isNaN(d.getTime()) ? null : d.toISOString();
|
||||
}
|
||||
} else {
|
||||
sessionData.createdAt = sessionData.createdAt || null;
|
||||
}
|
||||
sessionData.mode = data.mode;
|
||||
sessionData.agentId = data.agentId;
|
||||
sessionData.latestRootBlobId = data.latestRootBlobId;
|
||||
}
|
||||
} else {
|
||||
// If not hex, use raw value for simple keys
|
||||
if (row.key === 'name') {
|
||||
sessionData.name = row.value.toString();
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
console.log(`Could not parse meta value for key ${row.key}:`, e.message);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Get message count from JSON blobs only (actual messages, not DAG structure)
|
||||
try {
|
||||
const blobCount = await db.get(`
|
||||
SELECT COUNT(*) as count
|
||||
FROM blobs
|
||||
WHERE substr(data, 1, 1) = X'7B'
|
||||
`);
|
||||
sessionData.messageCount = blobCount.count;
|
||||
|
||||
// Get the most recent JSON blob for preview (actual message, not DAG structure)
|
||||
const lastBlob = await db.get(`
|
||||
SELECT data FROM blobs
|
||||
WHERE substr(data, 1, 1) = X'7B'
|
||||
ORDER BY rowid DESC
|
||||
LIMIT 1
|
||||
`);
|
||||
|
||||
if (lastBlob && lastBlob.data) {
|
||||
try {
|
||||
// Try to extract readable preview from blob (may contain binary with embedded JSON)
|
||||
const raw = lastBlob.data.toString('utf8');
|
||||
let preview = '';
|
||||
// Attempt direct JSON parse
|
||||
try {
|
||||
const parsed = JSON.parse(raw);
|
||||
if (parsed?.content) {
|
||||
if (Array.isArray(parsed.content)) {
|
||||
const firstText = parsed.content.find(p => p?.type === 'text' && p.text)?.text || '';
|
||||
preview = firstText;
|
||||
} else if (typeof parsed.content === 'string') {
|
||||
preview = parsed.content;
|
||||
}
|
||||
}
|
||||
} catch (_) {}
|
||||
if (!preview) {
|
||||
// Strip non-printable and try to find JSON chunk
|
||||
const cleaned = raw.replace(/[^\x09\x0A\x0D\x20-\x7E]/g, '');
|
||||
const s = cleaned;
|
||||
const start = s.indexOf('{');
|
||||
const end = s.lastIndexOf('}');
|
||||
if (start !== -1 && end > start) {
|
||||
const jsonStr = s.slice(start, end + 1);
|
||||
try {
|
||||
const parsed = JSON.parse(jsonStr);
|
||||
if (parsed?.content) {
|
||||
if (Array.isArray(parsed.content)) {
|
||||
const firstText = parsed.content.find(p => p?.type === 'text' && p.text)?.text || '';
|
||||
preview = firstText;
|
||||
} else if (typeof parsed.content === 'string') {
|
||||
preview = parsed.content;
|
||||
}
|
||||
}
|
||||
} catch (_) {
|
||||
preview = s;
|
||||
}
|
||||
} else {
|
||||
preview = s;
|
||||
}
|
||||
}
|
||||
if (preview && preview.length > 0) {
|
||||
sessionData.lastMessage = preview.substring(0, 100) + (preview.length > 100 ? '...' : '');
|
||||
}
|
||||
} catch (e) {
|
||||
console.log('Could not parse blob data:', e.message);
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
console.log('Could not read blobs:', e.message);
|
||||
}
|
||||
|
||||
await db.close();
|
||||
|
||||
// Finalize createdAt: use parsed meta value when valid, else fall back to store.db mtime
|
||||
if (!sessionData.createdAt) {
|
||||
if (dbStatMtimeMs && Number.isFinite(dbStatMtimeMs)) {
|
||||
sessionData.createdAt = new Date(dbStatMtimeMs).toISOString();
|
||||
}
|
||||
}
|
||||
|
||||
sessions.push(sessionData);
|
||||
|
||||
} catch (error) {
|
||||
console.log(`Could not read session ${sessionId}:`, error.message);
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: ensure createdAt is a valid ISO string (use session directory mtime as last resort)
|
||||
for (const s of sessions) {
|
||||
if (!s.createdAt) {
|
||||
try {
|
||||
const sessionDir = path.join(cursorChatsPath, s.id);
|
||||
const st = await fs.stat(sessionDir);
|
||||
s.createdAt = new Date(st.mtimeMs).toISOString();
|
||||
} catch {
|
||||
s.createdAt = new Date().toISOString();
|
||||
}
|
||||
}
|
||||
}
|
||||
// Sort sessions by creation date (newest first)
|
||||
sessions.sort((a, b) => {
|
||||
if (!a.createdAt) return 1;
|
||||
if (!b.createdAt) return -1;
|
||||
return new Date(b.createdAt) - new Date(a.createdAt);
|
||||
});
|
||||
|
||||
applyCustomSessionNames(sessions, 'cursor');
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
sessions: sessions,
|
||||
cwdId: cwdId,
|
||||
path: cursorChatsPath
|
||||
});
|
||||
|
||||
} catch (error) {
|
||||
console.error('Error reading Cursor sessions:', error);
|
||||
res.status(500).json({
|
||||
error: 'Failed to read Cursor sessions',
|
||||
details: error.message
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// GET /api/cursor/sessions/:sessionId - Get specific Cursor session from SQLite
//
// Reads a single Cursor chat session out of its per-session `store.db`.
// Cursor stores the conversation as a content-addressed DAG in the `blobs`
// table: JSON blobs are the actual messages, protobuf blobs encode the DAG
// edges between them. We topologically sort the DAG to recover chronological
// message order, then return the messages plus decoded `meta` table entries.
//
// Query params:
//   projectPath - absolute project path; hashed (md5) to locate the session
//                 directory under ~/.cursor/chats/<cwdId>/<sessionId>/.
//                 Falls back to process.cwd() when omitted.
//
// Responses:
//   200 { success, session: { id, projectPath, messages, metadata, cwdId } }
//   500 { error, details } on any failure (including a missing store.db)
router.get('/sessions/:sessionId', async (req, res) => {
  try {
    const { sessionId } = req.params;
    const { projectPath } = req.query;

    // Calculate cwdID hash for the project path
    // NOTE(review): md5 here is a directory-name scheme chosen by Cursor
    // itself, not a security decision on our side — TODO confirm against
    // Cursor's on-disk layout if it changes.
    const cwdId = crypto.createHash('md5').update(projectPath || process.cwd()).digest('hex');
    const storeDbPath = path.join(os.homedir(), '.cursor', 'chats', cwdId, sessionId, 'store.db');

    // Open SQLite database (read-only: we never mutate Cursor's store)
    const db = await open({
      filename: storeDbPath,
      driver: sqlite3.Database,
      mode: sqlite3.OPEN_READONLY
    });

    // Get all blobs to build the DAG structure
    const allBlobs = await db.all(`
      SELECT rowid, id, data FROM blobs
    `);

    // Build the DAG structure from parent-child relationships
    const blobMap = new Map(); // id -> blob data
    const parentRefs = new Map(); // blob id -> [parent blob ids]
    const childRefs = new Map(); // blob id -> [child blob ids]
    const jsonBlobs = []; // Clean JSON messages

    for (const blob of allBlobs) {
      blobMap.set(blob.id, blob);

      // Check if this is a JSON blob (actual message) or protobuf (DAG structure)
      if (blob.data && blob.data[0] === 0x7B) { // Starts with '{' - JSON blob
        try {
          const parsed = JSON.parse(blob.data.toString('utf8'));
          jsonBlobs.push({ ...blob, parsed });
        } catch (e) {
          console.log('Failed to parse JSON blob:', blob.rowid);
        }
      } else if (blob.data) { // Protobuf blob - extract parent references
        const parents = [];
        let i = 0;

        // Scan for parent references (0x0A 0x20 followed by 32-byte hash)
        // NOTE(review): this is a heuristic byte-scan, not a real protobuf
        // parse; a parent is only accepted if the 32-byte hash matches a blob
        // we've already seen in this loop, which depends on blobMap being
        // populated in rowid order above.
        while (i < blob.data.length - 33) {
          if (blob.data[i] === 0x0A && blob.data[i+1] === 0x20) {
            const parentHash = blob.data.slice(i+2, i+34).toString('hex');
            if (blobMap.has(parentHash)) {
              parents.push(parentHash);
            }
            i += 34;
          } else {
            i++;
          }
        }

        if (parents.length > 0) {
          parentRefs.set(blob.id, parents);
          // Update child references
          for (const parentId of parents) {
            if (!childRefs.has(parentId)) {
              childRefs.set(parentId, []);
            }
            childRefs.get(parentId).push(blob.id);
          }
        }
      }
    }

    // Perform topological sort to get chronological order
    const visited = new Set();
    const sorted = [];

    // DFS-based topological sort (recursive; depth is bounded by the number
    // of blobs in one session, which is expected to be small)
    function visit(nodeId) {
      if (visited.has(nodeId)) return;
      visited.add(nodeId);

      // Visit all parents first (dependencies)
      const parents = parentRefs.get(nodeId) || [];
      for (const parentId of parents) {
        visit(parentId);
      }

      // Add this node after all its parents
      const blob = blobMap.get(nodeId);
      if (blob) {
        sorted.push(blob);
      }
    }

    // Start with nodes that have no parents (roots)
    for (const blob of allBlobs) {
      if (!parentRefs.has(blob.id)) {
        visit(blob.id);
      }
    }

    // Visit any remaining nodes (disconnected components)
    for (const blob of allBlobs) {
      visit(blob.id);
    }

    // Now extract JSON messages in the order they appear in the sorted DAG.
    // JSON message blobs are not themselves DAG nodes; they are referenced by
    // hash from inside protobuf blobs, so we search each protobuf blob's raw
    // bytes for each JSON blob id. O(protobufs * jsonBlobs) — acceptable for
    // per-session blob counts.
    const messageOrder = new Map(); // JSON blob id -> order index
    let orderIndex = 0;

    for (const blob of sorted) {
      // Check if this blob references any JSON messages
      if (blob.data && blob.data[0] !== 0x7B) { // Protobuf blob
        // Look for JSON blob references
        for (const jsonBlob of jsonBlobs) {
          try {
            const jsonIdBytes = Buffer.from(jsonBlob.id, 'hex');
            if (blob.data.includes(jsonIdBytes)) {
              if (!messageOrder.has(jsonBlob.id)) {
                messageOrder.set(jsonBlob.id, orderIndex++);
              }
            }
          } catch (e) {
            // Skip if can't convert ID
          }
        }
      }
    }

    // Sort JSON blobs by their appearance order in the DAG
    // (sort() mutates jsonBlobs in place; jsonBlobs is not reused afterwards)
    const sortedJsonBlobs = jsonBlobs.sort((a, b) => {
      const orderA = messageOrder.get(a.id) ?? Number.MAX_SAFE_INTEGER;
      const orderB = messageOrder.get(b.id) ?? Number.MAX_SAFE_INTEGER;
      if (orderA !== orderB) return orderA - orderB;
      // Fallback to rowid if not in order map
      return a.rowid - b.rowid;
    });

    // Use sorted JSON blobs (1-based sequence numbers for the client)
    const blobs = sortedJsonBlobs.map((blob, idx) => ({
      ...blob,
      sequence_num: idx + 1,
      original_rowid: blob.rowid
    }));

    // Get metadata from meta table
    const metaRows = await db.all(`
      SELECT key, value FROM meta
    `);

    // Parse metadata: values may be hex-encoded JSON or plain strings
    let metadata = {};
    for (const row of metaRows) {
      if (row.value) {
        try {
          // Try to decode as hex-encoded JSON
          const hexMatch = row.value.toString().match(/^[0-9a-fA-F]+$/);
          if (hexMatch) {
            const jsonStr = Buffer.from(row.value, 'hex').toString('utf8');
            metadata[row.key] = JSON.parse(jsonStr);
          } else {
            metadata[row.key] = row.value.toString();
          }
        } catch (e) {
          // Hex decode / JSON parse failed — fall back to the raw string
          metadata[row.key] = row.value.toString();
        }
      }
    }

    // Extract messages from sorted JSON blobs
    const messages = [];
    for (const blob of blobs) {
      try {
        // We already parsed JSON blobs earlier
        const parsed = blob.parsed;

        if (parsed) {
          // Filter out ONLY system messages at the server level
          // Check both direct role and nested message.role
          const role = parsed?.role || parsed?.message?.role;
          if (role === 'system') {
            continue; // Skip only system messages
          }
          messages.push({
            id: blob.id,
            sequence: blob.sequence_num,
            rowid: blob.original_rowid,
            content: parsed
          });
        }
      } catch (e) {
        // Skip blobs that cause errors
        console.log(`Skipping blob ${blob.id}: ${e.message}`);
      }
    }

    await db.close();

    res.json({
      success: true,
      session: {
        id: sessionId,
        projectPath: projectPath,
        messages: messages,
        metadata: metadata,
        cwdId: cwdId
      }
    });

  } catch (error) {
    console.error('Error reading Cursor session:', error);
    res.status(500).json({
      error: 'Failed to read Cursor session',
      details: error.message
    });
  }
});
|
||||
|
||||
export default router;
|
||||
24
server/src/modules/gemini/gemini.routes.js
Normal file
24
server/src/modules/gemini/gemini.routes.js
Normal file
@@ -0,0 +1,24 @@
|
||||
import express from 'express';
|
||||
import sessionManager from '../../../sessionManager.js';
|
||||
import { sessionNamesDb } from '../../../database/db.js';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
// DELETE /api/gemini/sessions/:sessionId
// Removes a Gemini session and its user-assigned custom name.
// Responds 400 for malformed session IDs, 500 if deletion fails.
router.delete('/sessions/:sessionId', async (req, res) => {
  const { sessionId } = req.params;

  // Only accept short, filesystem-safe identifiers.
  const isValidId =
    typeof sessionId === 'string' && /^[a-zA-Z0-9_.-]{1,100}$/.test(sessionId);
  if (!isValidId) {
    return res.status(400).json({ success: false, error: 'Invalid session ID format' });
  }

  try {
    await sessionManager.deleteSession(sessionId);
    sessionNamesDb.deleteName(sessionId, 'gemini');
    res.json({ success: true });
  } catch (error) {
    console.error(`Error deleting Gemini session ${req.params.sessionId}:`, error);
    res.status(500).json({ success: false, error: error.message });
  }
});
|
||||
|
||||
export default router;
|
||||
1488
server/src/modules/git/git.routes.js
Normal file
1488
server/src/modules/git/git.routes.js
Normal file
File diff suppressed because it is too large
Load Diff
48
server/src/modules/mcp-utils/mcp-utils.routes.js
Normal file
48
server/src/modules/mcp-utils/mcp-utils.routes.js
Normal file
@@ -0,0 +1,48 @@
|
||||
/**
|
||||
* MCP UTILITIES API ROUTES
|
||||
* ========================
|
||||
*
|
||||
* API endpoints for MCP server detection and configuration utilities.
|
||||
* These endpoints expose centralized MCP detection functionality.
|
||||
*/
|
||||
|
||||
import express from 'express';
|
||||
import { detectTaskMasterMCPServer, getAllMCPServers } from '../../../utils/mcp-detector.js';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
/**
 * GET /api/mcp-utils/taskmaster-server
 *
 * Check whether a TaskMaster MCP server is configured, delegating to the
 * centralized detector. Returns the detector result as-is, or 500 on failure.
 */
router.get('/taskmaster-server', async (req, res) => {
  try {
    const detection = await detectTaskMasterMCPServer();
    res.json(detection);
  } catch (error) {
    console.error('TaskMaster MCP detection error:', error);
    res.status(500).json({
      error: 'Failed to detect TaskMaster MCP server',
      message: error.message
    });
  }
});
|
||||
|
||||
/**
 * GET /api/mcp-utils/all-servers
 *
 * Return every configured MCP server known to the centralized detector.
 * Passes the detector result through unchanged; 500 on failure.
 */
router.get('/all-servers', async (req, res) => {
  try {
    const servers = await getAllMCPServers();
    res.json(servers);
  } catch (error) {
    console.error('MCP servers detection error:', error);
    res.status(500).json({
      error: 'Failed to get MCP servers',
      message: error.message
    });
  }
});
|
||||
|
||||
export default router;
|
||||
552
server/src/modules/mcp/mcp.routes.js
Normal file
552
server/src/modules/mcp/mcp.routes.js
Normal file
@@ -0,0 +1,552 @@
|
||||
import express from 'express';
|
||||
import { promises as fs } from 'fs';
|
||||
import path from 'path';
|
||||
import os from 'os';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { dirname } from 'path';
|
||||
import { spawn } from 'child_process';
|
||||
|
||||
const router = express.Router();
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = dirname(__filename);
|
||||
|
||||
// Claude CLI command routes
|
||||
|
||||
// GET /api/mcp/cli/list - List MCP servers using Claude CLI
//
// Spawns `claude mcp list` and returns both the raw output and a parsed
// server list (see parseClaudeListOutput). 500 if the CLI fails to run or
// exits non-zero.
router.get('/cli/list', async (req, res) => {
  try {
    console.log('📋 Listing MCP servers using Claude CLI');

    const { spawn } = await import('child_process');

    // Renamed from `process`: the original shadowed Node's global `process`.
    const child = spawn('claude', ['mcp', 'list'], {
      stdio: ['pipe', 'pipe', 'pipe']
    });

    let stdout = '';
    let stderr = '';

    // 'error' (spawn failure) and 'close' can both fire for the same child;
    // guard so we only ever send one HTTP response.
    let responded = false;
    const sendOnce = (fn) => {
      if (!responded) {
        responded = true;
        fn();
      }
    };

    child.stdout.on('data', (data) => {
      stdout += data.toString();
    });

    child.stderr.on('data', (data) => {
      stderr += data.toString();
    });

    child.on('close', (code) => {
      sendOnce(() => {
        if (code === 0) {
          res.json({ success: true, output: stdout, servers: parseClaudeListOutput(stdout) });
        } else {
          console.error('Claude CLI error:', stderr);
          res.status(500).json({ error: 'Claude CLI command failed', details: stderr });
        }
      });
    });

    child.on('error', (error) => {
      console.error('Error running Claude CLI:', error);
      sendOnce(() => {
        res.status(500).json({ error: 'Failed to run Claude CLI', details: error.message });
      });
    });
  } catch (error) {
    console.error('Error listing MCP servers via CLI:', error);
    res.status(500).json({ error: 'Failed to list MCP servers', details: error.message });
  }
});
|
||||
|
||||
// POST /api/mcp/cli/add - Add MCP server using Claude CLI
//
// Body: { name, type = 'stdio', command, args = [], url, headers = {},
//         env = {}, scope = 'user', projectPath }
// stdio servers require `command`; http/sse servers require `url`.
// Local-scope servers run the CLI inside `projectPath`.
router.post('/cli/add', async (req, res) => {
  try {
    const { name, type = 'stdio', command, args = [], url, headers = {}, env = {}, scope = 'user', projectPath } = req.body;

    // Validate before spawning so we never pass `undefined` to the CLI.
    if (!name) {
      return res.status(400).json({ error: 'Server name is required' });
    }
    if (type === 'stdio' && !command) {
      return res.status(400).json({ error: 'stdio type requires a command' });
    }
    if ((type === 'http' || type === 'sse') && !url) {
      return res.status(400).json({ error: `${type} type requires a url` });
    }

    console.log(`➕ Adding MCP server using Claude CLI (${scope} scope):`, name);

    const { spawn } = await import('child_process');

    const cliArgs = ['mcp', 'add', '--scope', scope];

    if (type === 'http' || type === 'sse') {
      // Remote transports share the same flag shape (was duplicated per branch).
      cliArgs.push('--transport', type, name, url);
      Object.entries(headers).forEach(([key, value]) => {
        cliArgs.push('--header', `${key}: ${value}`);
      });
    } else {
      // stdio (default): claude mcp add --scope <scope> <name> [-e K=V ...] <command> [args...]
      cliArgs.push(name);
      Object.entries(env).forEach(([key, value]) => {
        cliArgs.push('-e', `${key}=${value}`);
      });
      cliArgs.push(command);
      if (args && args.length > 0) {
        cliArgs.push(...args);
      }
    }

    console.log('🔧 Running Claude CLI command:', 'claude', cliArgs.join(' '));

    // For local scope, the CLI must run in the project directory so the
    // server lands in that project's config.
    const spawnOptions = {
      stdio: ['pipe', 'pipe', 'pipe']
    };
    if (scope === 'local' && projectPath) {
      spawnOptions.cwd = projectPath;
      console.log('📁 Running in project directory:', projectPath);
    }

    // Renamed from `process`: the original shadowed Node's global `process`.
    const child = spawn('claude', cliArgs, spawnOptions);

    let stdout = '';
    let stderr = '';

    // 'error' and 'close' can both fire; send exactly one HTTP response.
    let responded = false;
    const sendOnce = (fn) => {
      if (!responded) {
        responded = true;
        fn();
      }
    };

    child.stdout.on('data', (data) => {
      stdout += data.toString();
    });

    child.stderr.on('data', (data) => {
      stderr += data.toString();
    });

    child.on('close', (code) => {
      sendOnce(() => {
        if (code === 0) {
          res.json({ success: true, output: stdout, message: `MCP server "${name}" added successfully` });
        } else {
          console.error('Claude CLI error:', stderr);
          res.status(400).json({ error: 'Claude CLI command failed', details: stderr });
        }
      });
    });

    child.on('error', (error) => {
      console.error('Error running Claude CLI:', error);
      sendOnce(() => {
        res.status(500).json({ error: 'Failed to run Claude CLI', details: error.message });
      });
    });
  } catch (error) {
    console.error('Error adding MCP server via CLI:', error);
    res.status(500).json({ error: 'Failed to add MCP server', details: error.message });
  }
});
|
||||
|
||||
// POST /api/mcp/cli/add-json - Add MCP server using JSON format
//
// Body: { name, jsonConfig (object or JSON string), scope = 'user', projectPath }
// Validates the config shape, then runs `claude mcp add-json`.
router.post('/cli/add-json', async (req, res) => {
  try {
    const { name, jsonConfig, scope = 'user', projectPath } = req.body;

    console.log('➕ Adding MCP server using JSON format:', name);

    // Validate and parse JSON config (accepts a string or an object).
    let parsedConfig;
    try {
      parsedConfig = typeof jsonConfig === 'string' ? JSON.parse(jsonConfig) : jsonConfig;
    } catch (parseError) {
      return res.status(400).json({
        error: 'Invalid JSON configuration',
        details: parseError.message
      });
    }

    // Validate required fields for each transport type.
    if (!parsedConfig.type) {
      return res.status(400).json({
        error: 'Invalid configuration',
        details: 'Missing required field: type'
      });
    }

    if (parsedConfig.type === 'stdio' && !parsedConfig.command) {
      return res.status(400).json({
        error: 'Invalid configuration',
        details: 'stdio type requires a command field'
      });
    }

    if ((parsedConfig.type === 'http' || parsedConfig.type === 'sse') && !parsedConfig.url) {
      return res.status(400).json({
        error: 'Invalid configuration',
        details: `${parsedConfig.type} type requires a url field`
      });
    }

    const { spawn } = await import('child_process');

    // Build the command: claude mcp add-json --scope <scope> <name> '<json>'
    const cliArgs = ['mcp', 'add-json', '--scope', scope, name];

    // Re-serialize so the CLI always receives canonical JSON.
    const jsonString = JSON.stringify(parsedConfig);
    cliArgs.push(jsonString);

    console.log('🔧 Running Claude CLI command:', 'claude', cliArgs[0], cliArgs[1], cliArgs[2], cliArgs[3], cliArgs[4], jsonString);

    // For local scope, run the CLI inside the project directory.
    const spawnOptions = {
      stdio: ['pipe', 'pipe', 'pipe']
    };
    if (scope === 'local' && projectPath) {
      spawnOptions.cwd = projectPath;
      console.log('📁 Running in project directory:', projectPath);
    }

    // Renamed from `process`: the original shadowed Node's global `process`.
    const child = spawn('claude', cliArgs, spawnOptions);

    let stdout = '';
    let stderr = '';

    // 'error' and 'close' can both fire; send exactly one HTTP response.
    let responded = false;
    const sendOnce = (fn) => {
      if (!responded) {
        responded = true;
        fn();
      }
    };

    child.stdout.on('data', (data) => {
      stdout += data.toString();
    });

    child.stderr.on('data', (data) => {
      stderr += data.toString();
    });

    child.on('close', (code) => {
      sendOnce(() => {
        if (code === 0) {
          res.json({ success: true, output: stdout, message: `MCP server "${name}" added successfully via JSON` });
        } else {
          console.error('Claude CLI error:', stderr);
          res.status(400).json({ error: 'Claude CLI command failed', details: stderr });
        }
      });
    });

    child.on('error', (error) => {
      console.error('Error running Claude CLI:', error);
      sendOnce(() => {
        res.status(500).json({ error: 'Failed to run Claude CLI', details: error.message });
      });
    });
  } catch (error) {
    console.error('Error adding MCP server via JSON:', error);
    res.status(500).json({ error: 'Failed to add MCP server', details: error.message });
  }
});
|
||||
|
||||
// DELETE /api/mcp/cli/remove/:name - Remove MCP server using Claude CLI
//
// The :name param may carry a scope prefix ("local:test"); scope may also be
// given via the `scope` query param. Runs `claude mcp remove`.
router.delete('/cli/remove/:name', async (req, res) => {
  try {
    const { name } = req.params;
    const { scope } = req.query; // Get scope from query params

    // Handle the ID format (strip a scope prefix like "local:test").
    let actualName = name;
    let actualScope = scope;

    if (name.includes(':')) {
      const [prefix, serverName] = name.split(':');
      actualName = serverName;
      actualScope = actualScope || prefix; // Use prefix as scope if not provided in query
    }

    console.log('🗑️ Removing MCP server using Claude CLI:', actualName, 'scope:', actualScope);

    const { spawn } = await import('child_process');

    // Build command args based on scope (user scope is the default).
    let cliArgs = ['mcp', 'remove'];

    if (actualScope === 'local') {
      cliArgs.push('--scope', 'local');
    } else if (actualScope === 'user' || !actualScope) {
      // User scope is default, but we can be explicit
      cliArgs.push('--scope', 'user');
    }

    cliArgs.push(actualName);

    console.log('🔧 Running Claude CLI command:', 'claude', cliArgs.join(' '));

    // Renamed from `process`: the original shadowed Node's global `process`.
    const child = spawn('claude', cliArgs, {
      stdio: ['pipe', 'pipe', 'pipe']
    });

    let stdout = '';
    let stderr = '';

    // 'error' and 'close' can both fire; send exactly one HTTP response.
    let responded = false;
    const sendOnce = (fn) => {
      if (!responded) {
        responded = true;
        fn();
      }
    };

    child.stdout.on('data', (data) => {
      stdout += data.toString();
    });

    child.stderr.on('data', (data) => {
      stderr += data.toString();
    });

    child.on('close', (code) => {
      sendOnce(() => {
        if (code === 0) {
          res.json({ success: true, output: stdout, message: `MCP server "${name}" removed successfully` });
        } else {
          console.error('Claude CLI error:', stderr);
          res.status(400).json({ error: 'Claude CLI command failed', details: stderr });
        }
      });
    });

    child.on('error', (error) => {
      console.error('Error running Claude CLI:', error);
      sendOnce(() => {
        res.status(500).json({ error: 'Failed to run Claude CLI', details: error.message });
      });
    });
  } catch (error) {
    console.error('Error removing MCP server via CLI:', error);
    res.status(500).json({ error: 'Failed to remove MCP server', details: error.message });
  }
});
|
||||
|
||||
// GET /api/mcp/cli/get/:name - Get MCP server details using Claude CLI
//
// Runs `claude mcp get <name>` and returns the raw output plus a best-effort
// parsed server object (see parseClaudeGetOutput). 404 if the CLI exits
// non-zero (server not found), 500 if it cannot run.
router.get('/cli/get/:name', async (req, res) => {
  try {
    const { name } = req.params;

    console.log('📄 Getting MCP server details using Claude CLI:', name);

    const { spawn } = await import('child_process');

    // Renamed from `process`: the original shadowed Node's global `process`.
    const child = spawn('claude', ['mcp', 'get', name], {
      stdio: ['pipe', 'pipe', 'pipe']
    });

    let stdout = '';
    let stderr = '';

    // 'error' and 'close' can both fire; send exactly one HTTP response.
    let responded = false;
    const sendOnce = (fn) => {
      if (!responded) {
        responded = true;
        fn();
      }
    };

    child.stdout.on('data', (data) => {
      stdout += data.toString();
    });

    child.stderr.on('data', (data) => {
      stderr += data.toString();
    });

    child.on('close', (code) => {
      sendOnce(() => {
        if (code === 0) {
          res.json({ success: true, output: stdout, server: parseClaudeGetOutput(stdout) });
        } else {
          console.error('Claude CLI error:', stderr);
          res.status(404).json({ error: 'Claude CLI command failed', details: stderr });
        }
      });
    });

    child.on('error', (error) => {
      console.error('Error running Claude CLI:', error);
      sendOnce(() => {
        res.status(500).json({ error: 'Failed to run Claude CLI', details: error.message });
      });
    });
  } catch (error) {
    console.error('Error getting MCP server details via CLI:', error);
    res.status(500).json({ error: 'Failed to get MCP server details', details: error.message });
  }
});
|
||||
|
||||
// GET /api/mcp/config/read - Read MCP servers directly from Claude config files
//
// Looks for ~/.claude.json then ~/.claude/settings.json, and extracts both
// user-scoped servers (root-level `mcpServers`) and local-scoped servers
// (`projects[<cwd>].mcpServers`). Returns { success, configPath, servers }.
router.get('/config/read', async (req, res) => {
  try {
    console.log('📖 Reading MCP servers from Claude config files');

    // Normalize one raw config entry into the server shape the UI expects.
    // Shared by user- and local-scope extraction (this logic was previously
    // duplicated verbatim for the two scopes).
    const buildServer = (name, config, extra = {}) => {
      const server = {
        id: name,
        name: name,
        type: 'stdio', // Default type
        scope: 'user', // Overridden via `extra` for local-scope servers
        config: {},
        raw: config, // Include raw config for full details
        ...extra
      };

      // Determine transport type and extract config
      if (config.command) {
        server.type = 'stdio';
        server.config.command = config.command;
        server.config.args = config.args || [];
        server.config.env = config.env || {};
      } else if (config.url) {
        server.type = config.transport || 'http';
        server.config.url = config.url;
        server.config.headers = config.headers || {};
      }

      return server;
    };

    const homeDir = os.homedir();
    const configPaths = [
      path.join(homeDir, '.claude.json'),
      path.join(homeDir, '.claude', 'settings.json')
    ];

    let configData = null;
    let configPath = null;

    // Try to read from either config file (first readable one wins).
    for (const filepath of configPaths) {
      try {
        const fileContent = await fs.readFile(filepath, 'utf8');
        configData = JSON.parse(fileContent);
        configPath = filepath;
        console.log(`✅ Found Claude config at: ${filepath}`);
        break;
      } catch (error) {
        // File doesn't exist or is not valid JSON, try next
        console.log(`ℹ️ Config not found or invalid at: ${filepath}`);
      }
    }

    if (!configData) {
      return res.json({
        success: false,
        message: 'No Claude configuration file found',
        servers: []
      });
    }

    const servers = [];

    // User-scoped MCP servers live at the config root.
    if (configData.mcpServers && typeof configData.mcpServers === 'object' && Object.keys(configData.mcpServers).length > 0) {
      console.log('🔍 Found user-scoped MCP servers:', Object.keys(configData.mcpServers));
      for (const [name, config] of Object.entries(configData.mcpServers)) {
        servers.push(buildServer(name, config));
      }
    }

    // Local-scoped MCP servers are keyed by project path under `projects`.
    const currentProjectPath = process.cwd();
    const projectConfig = configData.projects?.[currentProjectPath];

    if (projectConfig?.mcpServers && typeof projectConfig.mcpServers === 'object' && Object.keys(projectConfig.mcpServers).length > 0) {
      console.log(`🔍 Found local-scoped MCP servers for ${currentProjectPath}:`, Object.keys(projectConfig.mcpServers));
      for (const [name, config] of Object.entries(projectConfig.mcpServers)) {
        servers.push(buildServer(name, config, {
          id: `local:${name}`, // Prefix with scope for uniqueness
          scope: 'local', // Local scope - only for this project
          projectPath: currentProjectPath
        }));
      }
    }

    console.log(`📋 Found ${servers.length} MCP servers in config`);

    res.json({
      success: true,
      configPath: configPath,
      servers: servers
    });
  } catch (error) {
    console.error('Error reading Claude config:', error);
    res.status(500).json({
      error: 'Failed to read Claude configuration',
      details: error.message
    });
  }
});
|
||||
|
||||
// Helper functions to parse Claude CLI output

/**
 * Parse the plain-text output of `claude mcp list` into server objects.
 *
 * Expected line shape: "name: description - ✓ Connected" or
 * "name: description - ✗ Failed to connect"; the health-check header line is
 * skipped. Type is inferred as 'http' when the description is a URL,
 * otherwise 'stdio'.
 *
 * @param {string} output - Raw stdout from the CLI.
 * @returns {{name: string, type: string, status: string, description: string}[]}
 */
function parseClaudeListOutput(output) {
  const servers = [];
  const lines = output.split('\n').filter(line => line.trim());

  for (const line of lines) {
    // Skip the health-check header line
    if (line.includes('Checking MCP server health')) continue;

    // Every server line is "name: <rest>"; split on the FIRST colon only so
    // descriptions containing colons (e.g. URLs) stay intact.
    if (line.includes(':')) {
      const colonIndex = line.indexOf(':');
      const name = line.substring(0, colonIndex).trim();

      // Skip empty names
      if (!name) continue;

      const rest = line.substring(colonIndex + 1).trim();

      let description = rest;
      let status = 'unknown';
      let type = 'stdio'; // default type

      // "description - ✓/✗ status" — split description from the health marker
      if (rest.includes('✓') || rest.includes('✗')) {
        const statusMatch = rest.match(/(.*?)\s*-\s*([✓✗].*)$/);
        if (statusMatch) {
          description = statusMatch[1].trim();
          status = statusMatch[2].includes('✓') ? 'connected' : 'failed';
        }
      }

      // A URL description implies an HTTP transport
      if (description.startsWith('http://') || description.startsWith('https://')) {
        type = 'http';
      }

      // NOTE: the original had `status: status || 'active'`, but `status` is
      // always a non-empty string here, so the fallback was dead code.
      servers.push({
        name,
        type,
        status,
        description
      });
    }
  }

  console.log('🔍 Parsed Claude CLI servers:', servers);
  return servers;
}
|
||||
|
||||
/**
 * Parse the output of `claude mcp get <name>`.
 *
 * If the output contains a JSON object, that object is returned directly.
 * Otherwise known "Key: value" lines are extracted into a plain object that
 * always carries the raw output under `raw_output`.
 *
 * Bug fix: values were previously extracted with `line.split(':')[1]`, which
 * truncated any value containing a colon — notably URLs ("URL: https://x"
 * came back as "https"). Values are now everything after the FIRST colon.
 *
 * @param {string} output - Raw stdout from the CLI.
 * @returns {object} Parsed server details, or `{ raw_output, parse_error }`.
 */
function parseClaudeGetOutput(output) {
  try {
    // Try to extract JSON if present
    const jsonMatch = output.match(/\{[\s\S]*\}/);
    if (jsonMatch) {
      return JSON.parse(jsonMatch[0]);
    }

    // Otherwise, parse as "Key: value" text
    const server = { raw_output: output };

    // Everything after the first colon, trimmed — keeps colons in values.
    const valueAfterColon = (line) => line.slice(line.indexOf(':') + 1).trim();

    for (const line of output.split('\n')) {
      if (line.includes('Name:')) {
        server.name = valueAfterColon(line);
      } else if (line.includes('Type:')) {
        server.type = valueAfterColon(line);
      } else if (line.includes('Command:')) {
        server.command = valueAfterColon(line);
      } else if (line.includes('URL:')) {
        server.url = valueAfterColon(line);
      }
    }

    return server;
  } catch (error) {
    return { raw_output: output, parse_error: error.message };
  }
}
|
||||
|
||||
export default router;
|
||||
61
server/src/modules/messages/messages.routes.js
Normal file
61
server/src/modules/messages/messages.routes.js
Normal file
@@ -0,0 +1,61 @@
|
||||
/**
|
||||
* Unified messages endpoint.
|
||||
*
|
||||
* GET /api/sessions/:sessionId/messages?provider=claude&projectName=foo&limit=50&offset=0
|
||||
*
|
||||
* Replaces the four provider-specific session message endpoints with a single route
|
||||
* that delegates to the appropriate adapter via the provider registry.
|
||||
*
|
||||
* @module routes/messages
|
||||
*/
|
||||
|
||||
import express from 'express';
|
||||
import { getProvider, getAllProviders } from '../../../providers/registry.js';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
/**
 * GET /api/sessions/:sessionId/messages
 *
 * Unified message history endpoint: delegates to the provider adapter
 * selected by the `provider` query param via the provider registry.
 * Auth: authenticateToken applied at mount level in index.js.
 *
 * Query params:
 *   provider    - 'claude' | 'cursor' | 'codex' | 'gemini' (default: 'claude')
 *   projectName - required for claude provider
 *   projectPath - required for cursor provider (absolute path used for cwdId hash)
 *   limit       - page size (omit or empty for all messages)
 *   offset      - pagination offset (default: 0)
 */
router.get('/:sessionId/messages', async (req, res) => {
  try {
    const { sessionId } = req.params;
    const provider = req.query.provider || 'claude';
    const projectName = req.query.projectName || '';
    const projectPath = req.query.projectPath || '';

    // A missing/empty limit means "no paging": fetch the full history.
    const rawLimit = req.query.limit;
    const hasLimit = rawLimit !== undefined && rawLimit !== null && rawLimit !== '';
    const limit = hasLimit ? parseInt(rawLimit, 10) : null;
    const offset = parseInt(req.query.offset || '0', 10);

    const adapter = getProvider(provider);
    if (!adapter) {
      const available = getAllProviders().join(', ');
      return res
        .status(400)
        .json({ error: `Unknown provider: ${provider}. Available: ${available}` });
    }

    const result = await adapter.fetchHistory(sessionId, {
      projectName,
      projectPath,
      limit,
      offset,
    });
    return res.json(result);
  } catch (error) {
    console.error('Error fetching unified messages:', error);
    return res.status(500).json({ error: 'Failed to fetch messages' });
  }
});
|
||||
|
||||
export default router;
|
||||
@@ -0,0 +1,30 @@
|
||||
import express from 'express';
|
||||
import { notificationPreferencesDb } from '../../../database/db.js';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
// ===============================
|
||||
// Notification Preferences
|
||||
// ===============================
|
||||
|
||||
// Fetch the current user's notification preferences.
router.get('/', async (req, res) => {
  try {
    res.json({
      success: true,
      preferences: notificationPreferencesDb.getPreferences(req.user.id),
    });
  } catch (error) {
    console.error('Error fetching notification preferences:', error);
    res.status(500).json({ error: 'Failed to fetch notification preferences' });
  }
});
|
||||
|
||||
// Persist notification preferences for the current user; an absent body
// is treated as an empty update.
router.put('/', async (req, res) => {
  try {
    const updated = notificationPreferencesDb.updatePreferences(req.user.id, req.body || {});
    res.json({ success: true, preferences: updated });
  } catch (error) {
    console.error('Error saving notification preferences:', error);
    res.status(500).json({ error: 'Failed to save notification preferences' });
  }
});
|
||||
|
||||
export default router;
|
||||
307
server/src/modules/plugins/plugins.routes.js
Normal file
307
server/src/modules/plugins/plugins.routes.js
Normal file
@@ -0,0 +1,307 @@
|
||||
import express from 'express';
|
||||
import path from 'path';
|
||||
import http from 'http';
|
||||
import mime from 'mime-types';
|
||||
import fs from 'fs';
|
||||
import {
|
||||
scanPlugins,
|
||||
getPluginsConfig,
|
||||
getPluginsDir,
|
||||
savePluginsConfig,
|
||||
getPluginDir,
|
||||
resolvePluginAssetPath,
|
||||
installPluginFromGit,
|
||||
updatePluginFromGit,
|
||||
uninstallPlugin,
|
||||
} from '../../../utils/plugin-loader.js';
|
||||
import {
|
||||
startPluginServer,
|
||||
stopPluginServer,
|
||||
getPluginPort,
|
||||
isPluginRunning,
|
||||
} from '../../../utils/plugin-process-manager.js';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
// GET / — list installed plugins, annotating each with whether its
// server subprocess is currently running.
router.get('/', (req, res) => {
  try {
    const plugins = [];
    for (const plugin of scanPlugins()) {
      plugins.push({
        ...plugin,
        serverRunning: plugin.server ? isPluginRunning(plugin.name) : false,
      });
    }
    res.json({ plugins });
  } catch (err) {
    res.status(500).json({ error: 'Failed to scan plugins', details: err.message });
  }
});
|
||||
|
||||
// GET /:name/manifest — return the manifest for a single installed plugin.
router.get('/:name/manifest', (req, res) => {
  try {
    const { name } = req.params;
    // Reject names containing path separators or dots (traversal guard).
    if (!/^[a-zA-Z0-9_-]+$/.test(name)) {
      return res.status(400).json({ error: 'Invalid plugin name' });
    }
    const plugin = scanPlugins().find((p) => p.name === name);
    if (!plugin) {
      return res.status(404).json({ error: 'Plugin not found' });
    }
    res.json(plugin);
  } catch (err) {
    res.status(500).json({ error: 'Failed to read plugin manifest', details: err.message });
  }
});
|
||||
|
||||
// GET /:name/assets/* — Serve plugin static files.
// The asset path is validated by resolvePluginAssetPath (null means the
// request resolves outside the plugin's files or does not exist).
router.get('/:name/assets/*', (req, res) => {
  const pluginName = req.params.name;
  // Traversal guard: only allow simple alphanumeric/dash/underscore names.
  if (!/^[a-zA-Z0-9_-]+$/.test(pluginName)) {
    return res.status(400).json({ error: 'Invalid plugin name' });
  }
  // Wildcard capture: everything after "/assets/".
  const assetPath = req.params[0];

  if (!assetPath) {
    return res.status(400).json({ error: 'No asset path specified' });
  }

  const resolvedPath = resolvePluginAssetPath(pluginName, assetPath);
  if (!resolvedPath) {
    return res.status(404).json({ error: 'Asset not found' });
  }

  // Only serve regular files; directories and stat failures both 404.
  try {
    const stat = fs.statSync(resolvedPath);
    if (!stat.isFile()) {
      return res.status(404).json({ error: 'Asset not found' });
    }
  } catch {
    return res.status(404).json({ error: 'Asset not found' });
  }

  const contentType = mime.lookup(resolvedPath) || 'application/octet-stream';
  res.setHeader('Content-Type', contentType);
  // Prevent CDN/proxy caching of plugin assets so updates take effect immediately
  res.setHeader('Cache-Control', 'no-store, no-cache, must-revalidate');
  res.setHeader('Pragma', 'no-cache');
  res.setHeader('Expires', '0');
  const stream = fs.createReadStream(resolvedPath);
  stream.on('error', () => {
    // Once headers are out we can only truncate the body; otherwise
    // report a proper 500.
    if (!res.headersSent) {
      res.status(500).json({ error: 'Failed to read asset' });
    } else {
      res.end();
    }
  });
  stream.pipe(res);
});
|
||||
|
||||
// PUT /:name/enable — Toggle plugin enabled/disabled (starts/stops server if applicable)
router.put('/:name/enable', async (req, res) => {
  try {
    const { enabled } = req.body;
    if (typeof enabled !== 'boolean') {
      return res.status(400).json({ error: '"enabled" must be a boolean' });
    }

    const plugins = scanPlugins();
    const plugin = plugins.find(p => p.name === req.params.name);
    if (!plugin) {
      return res.status(404).json({ error: 'Plugin not found' });
    }

    // Persist the flag BEFORE touching the server process so the desired
    // state survives even if the start/stop below fails.
    const config = getPluginsConfig();
    config[req.params.name] = { ...config[req.params.name], enabled };
    savePluginsConfig(config);

    // Start or stop the plugin server as needed
    if (plugin.server) {
      if (enabled && !isPluginRunning(plugin.name)) {
        const pluginDir = getPluginDir(plugin.name);
        if (pluginDir) {
          try {
            await startPluginServer(plugin.name, pluginDir, plugin.server);
          } catch (err) {
            // A failed start is non-fatal: the plugin stays enabled in
            // config and the rpc route can retry lazily.
            console.error(`[Plugins] Failed to start server for "${plugin.name}":`, err.message);
          }
        }
      } else if (!enabled && isPluginRunning(plugin.name)) {
        await stopPluginServer(plugin.name);
      }
    }

    res.json({ success: true, name: req.params.name, enabled });
  } catch (err) {
    res.status(500).json({ error: 'Failed to update plugin', details: err.message });
  }
});
|
||||
|
||||
// POST /install — Install plugin from git URL
router.post('/install', async (req, res) => {
  try {
    const { url } = req.body;
    if (!url || typeof url !== 'string') {
      return res.status(400).json({ error: '"url" is required and must be a string' });
    }

    // Basic URL validation
    if (!url.startsWith('https://') && !url.startsWith('git@')) {
      return res.status(400).json({ error: 'URL must start with https:// or git@' });
    }

    const manifest = await installPluginFromGit(url);

    // Auto-start the server if the plugin has one (enabled by default)
    if (manifest.server) {
      const pluginDir = getPluginDir(manifest.name);
      if (pluginDir) {
        try {
          await startPluginServer(manifest.name, pluginDir, manifest.server);
        } catch (err) {
          // Install itself succeeded; a failed server start is logged
          // but does not fail the request.
          console.error(`[Plugins] Failed to start server for "${manifest.name}":`, err.message);
        }
      }
    }

    res.json({ success: true, plugin: manifest });
  } catch (err) {
    // 400 rather than 500: failures here are typically bad URLs or
    // git/manifest errors in the requested plugin, not server faults.
    res.status(400).json({ error: 'Failed to install plugin', details: err.message });
  }
});
|
||||
|
||||
// POST /:name/update — pull the latest plugin code from git, restarting
// its server afterwards only if it was running before the update.
router.post('/:name/update', async (req, res) => {
  try {
    const name = req.params.name;
    if (!/^[a-zA-Z0-9_-]+$/.test(name)) {
      return res.status(400).json({ error: 'Invalid plugin name' });
    }

    // Stop a running server before git touches files on disk.
    const wasRunning = isPluginRunning(name);
    if (wasRunning) {
      await stopPluginServer(name);
    }

    const manifest = await updatePluginFromGit(name);

    // Restore the previous running state after a successful update.
    const shouldRestart = wasRunning && manifest.server;
    if (shouldRestart) {
      const pluginDir = getPluginDir(name);
      if (pluginDir) {
        try {
          await startPluginServer(name, pluginDir, manifest.server);
        } catch (err) {
          console.error(`[Plugins] Failed to restart server for "${name}":`, err.message);
        }
      }
    }

    res.json({ success: true, plugin: manifest });
  } catch (err) {
    res.status(400).json({ error: 'Failed to update plugin', details: err.message });
  }
});
|
||||
|
||||
// ALL /:name/rpc/* — Proxy requests to plugin's server subprocess.
// The plugin server listens on localhost on a port tracked by the
// process manager; the proxy lazily starts it on first request.
router.all('/:name/rpc/*', async (req, res) => {
  const pluginName = req.params.name;
  const rpcPath = req.params[0] || '';

  // Traversal guard on the plugin name.
  if (!/^[a-zA-Z0-9_-]+$/.test(pluginName)) {
    return res.status(400).json({ error: 'Invalid plugin name' });
  }

  let port = getPluginPort(pluginName);
  if (!port) {
    // Lazily start the plugin server if it exists and is enabled
    const plugins = scanPlugins();
    const plugin = plugins.find(p => p.name === pluginName);
    if (!plugin || !plugin.server) {
      return res.status(503).json({ error: 'Plugin server is not running' });
    }
    if (!plugin.enabled) {
      return res.status(503).json({ error: 'Plugin is disabled' });
    }
    const pluginDir = path.join(getPluginsDir(), plugin.dirName);
    try {
      port = await startPluginServer(pluginName, pluginDir, plugin.server);
    } catch (err) {
      return res.status(503).json({ error: 'Plugin server failed to start', details: err.message });
    }
  }

  // Inject configured secrets as headers
  const config = getPluginsConfig();
  const pluginConfig = config[pluginName] || {};
  const secrets = pluginConfig.secrets || {};

  const headers = {
    'content-type': req.headers['content-type'] || 'application/json',
  };

  // Add per-plugin user-configured secrets as X-Plugin-Secret-* headers
  for (const [key, value] of Object.entries(secrets)) {
    headers[`x-plugin-secret-${key.toLowerCase()}`] = String(value);
  }

  // Reconstruct query string (everything after the first '?').
  const qs = req.url.includes('?') ? '?' + req.url.split('?').slice(1).join('?') : '';

  const options = {
    hostname: '127.0.0.1',
    port,
    path: `/${rpcPath}${qs}`,
    method: req.method,
    headers,
  };

  // Stream the upstream response straight back to the client,
  // preserving its status code and headers.
  const proxyReq = http.request(options, (proxyRes) => {
    res.writeHead(proxyRes.statusCode, proxyRes.headers);
    proxyRes.pipe(res);
  });

  proxyReq.on('error', (err) => {
    if (!res.headersSent) {
      res.status(502).json({ error: 'Plugin server error', details: err.message });
    } else {
      res.end();
    }
  });

  // Forward body (already parsed by express JSON middleware, so re-stringify).
  // Check content-length to detect whether a body was actually sent, since
  // req.body can be falsy for valid payloads like 0, false, null, or {}.
  const hasBody = req.headers['content-length'] && parseInt(req.headers['content-length'], 10) > 0;
  if (hasBody && req.body !== undefined) {
    const bodyStr = JSON.stringify(req.body);
    // Recompute content-length: the re-serialized body may differ in
    // size from the original request payload.
    proxyReq.setHeader('content-length', Buffer.byteLength(bodyStr));
    proxyReq.write(bodyStr);
  }

  proxyReq.end();
});
|
||||
|
||||
// DELETE /:name — uninstall a plugin, stopping its server process first
// so its files are not removed out from under a running child.
router.delete('/:name', async (req, res) => {
  const name = req.params.name;
  try {
    // Validate name format to prevent path traversal.
    if (!/^[a-zA-Z0-9_-]+$/.test(name)) {
      return res.status(400).json({ error: 'Invalid plugin name' });
    }

    // Wait for the process to fully exit before deleting files.
    if (isPluginRunning(name)) {
      await stopPluginServer(name);
    }

    await uninstallPlugin(name);
    res.json({ success: true, name });
  } catch (err) {
    res.status(400).json({ error: 'Failed to uninstall plugin', details: err.message });
  }
});
|
||||
|
||||
export default router;
|
||||
548
server/src/modules/projects/projects.routes.js
Normal file
548
server/src/modules/projects/projects.routes.js
Normal file
@@ -0,0 +1,548 @@
|
||||
import express from 'express';
|
||||
import { promises as fs } from 'fs';
|
||||
import path from 'path';
|
||||
import { spawn } from 'child_process';
|
||||
import os from 'os';
|
||||
import { addProjectManually } from '../../../projects.js';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
/**
 * Redact a secret token from a git error message.
 *
 * @param {string} message - Error text that may embed the token (e.g. in a URL).
 * @param {string} token - The secret to redact.
 * @returns {string} The message with every occurrence replaced by '***';
 *   returned unchanged when either argument is empty.
 */
function sanitizeGitError(message, token) {
  if (!message || !token) {
    return message;
  }
  // Escape regex metacharacters so the token is matched literally.
  const escaped = token.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
  return message.replace(new RegExp(escaped, 'g'), '***');
}
|
||||
|
||||
// Configure allowed workspace root (defaults to user's home directory).
// All workspace paths created/added via this router must resolve inside it.
export const WORKSPACES_ROOT = process.env.WORKSPACES_ROOT || os.homedir();

// System-critical paths that should never be used as workspace directories.
// Compared against normalized absolute paths (exact match or prefix).
export const FORBIDDEN_PATHS = [
  // Unix
  '/',
  '/etc',
  '/bin',
  '/sbin',
  '/usr',
  '/dev',
  '/proc',
  '/sys',
  '/var',
  '/boot',
  '/root',
  '/lib',
  '/lib64',
  '/opt',
  '/tmp',
  '/run',
  // Windows
  'C:\\Windows',
  'C:\\Program Files',
  'C:\\Program Files (x86)',
  'C:\\ProgramData',
  'C:\\System Volume Information',
  'C:\\$Recycle.Bin'
];
|
||||
|
||||
/**
 * Validates that a path is safe for workspace operations.
 *
 * Checks performed, in order:
 *  1. The normalized absolute path is not a forbidden system directory
 *     (nor inside one, with narrow /var/tmp and /var/folders exceptions).
 *  2. Symlinks are resolved; for not-yet-existing paths the parent's real
 *     path is used so containment is still checked against real locations.
 *  3. The real path is contained within WORKSPACES_ROOT.
 *  4. If the path itself is a symlink, its target must also be inside
 *     WORKSPACES_ROOT.
 *
 * Never throws: all failures are reported via the returned object.
 *
 * @param {string} requestedPath - The path to validate
 * @returns {Promise<{valid: boolean, resolvedPath?: string, error?: string}>}
 */
export async function validateWorkspacePath(requestedPath) {
  try {
    // Resolve to absolute path
    let absolutePath = path.resolve(requestedPath);

    // Check if path is a forbidden system directory
    const normalizedPath = path.normalize(absolutePath);
    if (FORBIDDEN_PATHS.includes(normalizedPath) || normalizedPath === '/') {
      return {
        valid: false,
        error: 'Cannot use system-critical directories as workspace locations'
      };
    }

    // Additional check for paths starting with forbidden directories
    for (const forbidden of FORBIDDEN_PATHS) {
      if (normalizedPath === forbidden ||
          normalizedPath.startsWith(forbidden + path.sep)) {
        // Exception: /var/tmp and similar user-accessible paths might be allowed
        // but /var itself and most /var subdirectories should be blocked
        if (forbidden === '/var' &&
            (normalizedPath.startsWith('/var/tmp') ||
             normalizedPath.startsWith('/var/folders'))) {
          continue; // Allow these specific cases
        }

        return {
          valid: false,
          error: `Cannot create workspace in system directory: ${forbidden}`
        };
      }
    }

    // Try to resolve the real path (following symlinks)
    let realPath;
    try {
      // Check if path exists to resolve real path
      await fs.access(absolutePath);
      realPath = await fs.realpath(absolutePath);
    } catch (error) {
      if (error.code === 'ENOENT') {
        // Path doesn't exist yet - check parent directory
        let parentPath = path.dirname(absolutePath);
        try {
          const parentRealPath = await fs.realpath(parentPath);

          // Reconstruct the full path with real parent
          realPath = path.join(parentRealPath, path.basename(absolutePath));
        } catch (parentError) {
          if (parentError.code === 'ENOENT') {
            // Parent doesn't exist either - use the absolute path as-is
            // We'll validate it's within allowed root
            realPath = absolutePath;
          } else {
            throw parentError;
          }
        }
      } else {
        throw error;
      }
    }

    // Resolve the workspace root to its real path
    const resolvedWorkspaceRoot = await fs.realpath(WORKSPACES_ROOT);

    // Ensure the resolved path is contained within the allowed workspace root
    if (!realPath.startsWith(resolvedWorkspaceRoot + path.sep) &&
        realPath !== resolvedWorkspaceRoot) {
      return {
        valid: false,
        error: `Workspace path must be within the allowed workspace root: ${WORKSPACES_ROOT}`
      };
    }

    // Additional symlink check for existing paths
    try {
      await fs.access(absolutePath);
      const stats = await fs.lstat(absolutePath);

      if (stats.isSymbolicLink()) {
        // Verify symlink target is also within allowed root
        const linkTarget = await fs.readlink(absolutePath);
        const resolvedTarget = path.resolve(path.dirname(absolutePath), linkTarget);
        const realTarget = await fs.realpath(resolvedTarget);

        if (!realTarget.startsWith(resolvedWorkspaceRoot + path.sep) &&
            realTarget !== resolvedWorkspaceRoot) {
          return {
            valid: false,
            error: 'Symlink target is outside the allowed workspace root'
          };
        }
      }
    } catch (error) {
      if (error.code !== 'ENOENT') {
        throw error;
      }
      // Path doesn't exist - that's fine for new workspace creation
    }

    return {
      valid: true,
      resolvedPath: realPath
    };

  } catch (error) {
    return {
      valid: false,
      error: `Path validation failed: ${error.message}`
    };
  }
}
|
||||
|
||||
/**
 * Create a new workspace
 * POST /api/projects/create-workspace
 *
 * Body:
 * - workspaceType: 'existing' | 'new'
 * - path: string (workspace path)
 * - githubUrl?: string (optional, for new workspaces)
 * - githubTokenId?: number (optional, ID of stored token)
 * - newGithubToken?: string (optional, one-time token)
 *
 * Responses: 400 on validation failure, 404 for missing path/token,
 * 409 when a clone destination already exists, 500 on unexpected errors.
 */
router.post('/create-workspace', async (req, res) => {
  try {
    const { workspaceType, path: workspacePath, githubUrl, githubTokenId, newGithubToken } = req.body;

    // Validate required fields
    if (!workspaceType || !workspacePath) {
      return res.status(400).json({ error: 'workspaceType and path are required' });
    }

    if (!['existing', 'new'].includes(workspaceType)) {
      return res.status(400).json({ error: 'workspaceType must be "existing" or "new"' });
    }

    // Validate path safety before any operations
    const validation = await validateWorkspacePath(workspacePath);
    if (!validation.valid) {
      return res.status(400).json({
        error: 'Invalid workspace path',
        details: validation.error
      });
    }

    const absolutePath = validation.resolvedPath;

    // Handle existing workspace
    if (workspaceType === 'existing') {
      // Check if the path exists
      try {
        await fs.access(absolutePath);
        const stats = await fs.stat(absolutePath);

        if (!stats.isDirectory()) {
          return res.status(400).json({ error: 'Path exists but is not a directory' });
        }
      } catch (error) {
        if (error.code === 'ENOENT') {
          return res.status(404).json({ error: 'Workspace path does not exist' });
        }
        throw error;
      }

      // Add the existing workspace to the project list
      const project = await addProjectManually(absolutePath);

      return res.json({
        success: true,
        project,
        message: 'Existing workspace added successfully'
      });
    }

    // Handle new workspace creation
    if (workspaceType === 'new') {
      // Create the directory if it doesn't exist
      await fs.mkdir(absolutePath, { recursive: true });

      // If GitHub URL is provided, clone the repository
      if (githubUrl) {
        let githubToken = null;

        // Get GitHub token if needed
        if (githubTokenId) {
          // Fetch token from database
          const token = await getGithubTokenById(githubTokenId, req.user.id);
          if (!token) {
            // Clean up created directory
            await fs.rm(absolutePath, { recursive: true, force: true });
            return res.status(404).json({ error: 'GitHub token not found' });
          }
          githubToken = token.github_token;
        } else if (newGithubToken) {
          githubToken = newGithubToken;
        }

        // Extract repo name from URL for the clone destination
        const normalizedUrl = githubUrl.replace(/\/+$/, '').replace(/\.git$/, '');
        const repoName = normalizedUrl.split('/').pop() || 'repository';
        const clonePath = path.join(absolutePath, repoName);

        // Check if clone destination already exists to prevent data loss
        try {
          await fs.access(clonePath);
          return res.status(409).json({
            error: 'Directory already exists',
            details: `The destination path "${clonePath}" already exists. Please choose a different location or remove the existing directory.`
          });
        } catch (err) {
          // Directory doesn't exist, which is what we want
        }

        // Clone the repository into a subfolder
        try {
          await cloneGitHubRepository(githubUrl, clonePath, githubToken);
        } catch (error) {
          // Only clean up if clone created partial data (check if dir exists and is empty or partial)
          try {
            const stats = await fs.stat(clonePath);
            if (stats.isDirectory()) {
              await fs.rm(clonePath, { recursive: true, force: true });
            }
          } catch (cleanupError) {
            // Directory doesn't exist or cleanup failed - ignore
          }
          throw new Error(`Failed to clone repository: ${error.message}`);
        }

        // Add the cloned repo path to the project list
        const project = await addProjectManually(clonePath);

        return res.json({
          success: true,
          project,
          message: 'New workspace created and repository cloned successfully'
        });
      }

      // Add the new workspace to the project list (no clone)
      const project = await addProjectManually(absolutePath);

      return res.json({
        success: true,
        project,
        message: 'New workspace created successfully'
      });
    }

  } catch (error) {
    console.error('Error creating workspace:', error);
    res.status(500).json({
      error: error.message || 'Failed to create workspace',
      details: process.env.NODE_ENV === 'development' ? error.stack : undefined
    });
  }
});
|
||||
|
||||
/**
 * Look up an active GitHub token credential for a user.
 *
 * @param {number} tokenId - Credential row id.
 * @param {number} userId - Owning user's id.
 * @returns {Promise<object|null>} The credential row with the value aliased
 *   as `github_token` (legacy field name expected by callers), or null.
 */
async function getGithubTokenById(tokenId, userId) {
  const { db } = await import('../../../database/db.js');

  const row = db
    .prepare(
      'SELECT * FROM user_credentials WHERE id = ? AND user_id = ? AND credential_type = ? AND is_active = 1'
    )
    .get(tokenId, userId, 'github_token');

  if (!row) {
    return null;
  }

  // Expose credential_value under the legacy github_token name for callers.
  return { ...row, github_token: row.credential_value };
}
|
||||
|
||||
/**
 * Clone repository with progress streaming (SSE)
 * GET /api/projects/clone-progress
 *
 * Query: path, githubUrl, githubTokenId?, newGithubToken?
 * Emits SSE events of shape { type: 'progress' | 'complete' | 'error', ... }.
 * All failures are reported as 'error' events (headers are already sent,
 * so HTTP status codes cannot be used).
 */
router.get('/clone-progress', async (req, res) => {
  const { path: workspacePath, githubUrl, githubTokenId, newGithubToken } = req.query;

  res.setHeader('Content-Type', 'text/event-stream');
  res.setHeader('Cache-Control', 'no-cache');
  res.setHeader('Connection', 'keep-alive');
  res.flushHeaders();

  // Serialize one SSE event frame.
  const sendEvent = (type, data) => {
    res.write(`data: ${JSON.stringify({ type, ...data })}\n\n`);
  };

  try {
    if (!workspacePath || !githubUrl) {
      sendEvent('error', { message: 'workspacePath and githubUrl are required' });
      res.end();
      return;
    }

    const validation = await validateWorkspacePath(workspacePath);
    if (!validation.valid) {
      sendEvent('error', { message: validation.error });
      res.end();
      return;
    }

    const absolutePath = validation.resolvedPath;

    await fs.mkdir(absolutePath, { recursive: true });

    // Resolve the token: stored credential takes precedence over a one-time token.
    let githubToken = null;
    if (githubTokenId) {
      const token = await getGithubTokenById(parseInt(githubTokenId), req.user.id);
      if (!token) {
        await fs.rm(absolutePath, { recursive: true, force: true });
        sendEvent('error', { message: 'GitHub token not found' });
        res.end();
        return;
      }
      githubToken = token.github_token;
    } else if (newGithubToken) {
      githubToken = newGithubToken;
    }

    const normalizedUrl = githubUrl.replace(/\/+$/, '').replace(/\.git$/, '');
    const repoName = normalizedUrl.split('/').pop() || 'repository';
    const clonePath = path.join(absolutePath, repoName);

    // Check if clone destination already exists to prevent data loss
    try {
      await fs.access(clonePath);
      sendEvent('error', { message: `Directory "${repoName}" already exists. Please choose a different location or remove the existing directory.` });
      res.end();
      return;
    } catch (err) {
      // Directory doesn't exist, which is what we want
    }

    // Embed the token as the URL username for authenticated HTTPS clones.
    let cloneUrl = githubUrl;
    if (githubToken) {
      try {
        const url = new URL(githubUrl);
        url.username = githubToken;
        url.password = '';
        cloneUrl = url.toString();
      } catch (error) {
        // SSH URL or invalid - use as-is
      }
    }

    sendEvent('progress', { message: `Cloning into '${repoName}'...` });

    // GIT_TERMINAL_PROMPT=0 makes git fail fast instead of hanging on a
    // credential prompt.
    const gitProcess = spawn('git', ['clone', '--progress', cloneUrl, clonePath], {
      stdio: ['ignore', 'pipe', 'pipe'],
      env: {
        ...process.env,
        GIT_TERMINAL_PROMPT: '0'
      }
    });

    let lastError = '';

    gitProcess.stdout.on('data', (data) => {
      const message = data.toString().trim();
      if (message) {
        sendEvent('progress', { message });
      }
    });

    // git writes progress to stderr; keep the last chunk for diagnostics.
    gitProcess.stderr.on('data', (data) => {
      const message = data.toString().trim();
      lastError = message;
      if (message) {
        sendEvent('progress', { message });
      }
    });

    gitProcess.on('close', async (code) => {
      if (code === 0) {
        try {
          const project = await addProjectManually(clonePath);
          sendEvent('complete', { project, message: 'Repository cloned successfully' });
        } catch (error) {
          sendEvent('error', { message: `Clone succeeded but failed to add project: ${error.message}` });
        }
      } else {
        // Redact the token before any of this text can reach the client.
        const sanitizedError = sanitizeGitError(lastError, githubToken);
        let errorMessage = 'Git clone failed';
        if (lastError.includes('Authentication failed') || lastError.includes('could not read Username')) {
          errorMessage = 'Authentication failed. Please check your credentials.';
        } else if (lastError.includes('Repository not found')) {
          errorMessage = 'Repository not found. Please check the URL and ensure you have access.';
        } else if (lastError.includes('already exists')) {
          errorMessage = 'Directory already exists';
        } else if (sanitizedError) {
          errorMessage = sanitizedError;
        }
        try {
          await fs.rm(clonePath, { recursive: true, force: true });
        } catch (cleanupError) {
          console.error('Failed to clean up after clone failure:', sanitizeGitError(cleanupError.message, githubToken));
        }
        sendEvent('error', { message: errorMessage });
      }
      res.end();
    });

    gitProcess.on('error', (error) => {
      if (error.code === 'ENOENT') {
        sendEvent('error', { message: 'Git is not installed or not in PATH' });
      } else {
        sendEvent('error', { message: error.message });
      }
      res.end();
    });

    // Kill the clone if the client disconnects mid-stream.
    req.on('close', () => {
      gitProcess.kill();
    });

  } catch (error) {
    sendEvent('error', { message: error.message });
    res.end();
  }
});
|
||||
|
||||
/**
 * Clone a GitHub repository into a destination directory.
 *
 * When a token is provided and the URL is HTTPS, the token is embedded
 * as the URL username for authentication (SSH URLs are used as-is).
 * GIT_TERMINAL_PROMPT=0 makes git fail fast instead of hanging on a
 * credential prompt.
 *
 * @param {string} githubUrl - Repository URL (https:// or git@).
 * @param {string} destinationPath - Directory to clone into.
 * @param {string|null} githubToken - Optional access token.
 * @returns {Promise<{stdout: string, stderr: string}>} Resolves on success;
 *   rejects with a token-redacted, human-readable error message on failure.
 */
function cloneGitHubRepository(githubUrl, destinationPath, githubToken = null) {
  return new Promise((resolve, reject) => {
    let cloneUrl = githubUrl;

    if (githubToken) {
      try {
        const url = new URL(githubUrl);
        url.username = githubToken;
        url.password = '';
        cloneUrl = url.toString();
      } catch (error) {
        // SSH URL - use as-is
      }
    }

    const gitProcess = spawn('git', ['clone', '--progress', cloneUrl, destinationPath], {
      stdio: ['ignore', 'pipe', 'pipe'],
      env: {
        ...process.env,
        GIT_TERMINAL_PROMPT: '0'
      }
    });

    let stdout = '';
    let stderr = '';

    gitProcess.stdout.on('data', (data) => {
      stdout += data.toString();
    });

    gitProcess.stderr.on('data', (data) => {
      stderr += data.toString();
    });

    gitProcess.on('close', (code) => {
      if (code === 0) {
        resolve({ stdout, stderr });
      } else {
        let errorMessage = 'Git clone failed';

        if (stderr.includes('Authentication failed') || stderr.includes('could not read Username')) {
          errorMessage = 'Authentication failed. Please check your GitHub token.';
        } else if (stderr.includes('Repository not found')) {
          errorMessage = 'Repository not found. Please check the URL and ensure you have access.';
        } else if (stderr.includes('already exists')) {
          errorMessage = 'Directory already exists';
        } else if (stderr) {
          // BUGFIX: git's stderr can echo the clone URL with the embedded
          // token; redact it (same as the SSE route) before it can leak
          // into API error responses or logs.
          errorMessage = sanitizeGitError(stderr, githubToken);
        }

        reject(new Error(errorMessage));
      }
    });

    gitProcess.on('error', (error) => {
      if (error.code === 'ENOENT') {
        reject(new Error('Git is not installed or not in PATH'));
      } else {
        reject(error);
      }
    });
  });
}
|
||||
|
||||
export default router;
|
||||
80
server/src/modules/push-sub/push-sub.routes.js
Normal file
80
server/src/modules/push-sub/push-sub.routes.js
Normal file
@@ -0,0 +1,80 @@
|
||||
import express from 'express';
|
||||
import { notificationPreferencesDb, pushSubscriptionsDb } from '../../../database/db.js';
|
||||
import { getPublicKey } from '../../../services/vapid-keys.js';
|
||||
import { createNotificationEvent, notifyUserIfEnabled } from '../../../services/notification-orchestrator.js';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
// ===============================
|
||||
// Push Subscription Management
|
||||
// ===============================
|
||||
|
||||
// Expose the server's VAPID public key so the client can create a
// PushManager subscription bound to this server.
router.get('/vapid-public-key', async (req, res) => {
  try {
    res.json({ publicKey: getPublicKey() });
  } catch (error) {
    console.error('Error fetching VAPID public key:', error);
    res.status(500).json({ error: 'Failed to fetch VAPID public key' });
  }
});
|
||||
|
||||
// Register a web-push subscription for the current user, enable the webPush
// channel in their preferences, and send a confirmation notification through
// the normal notification pipeline.
router.post('/subscribe', async (req, res) => {
  try {
    const { endpoint, keys } = req.body;
    if (!endpoint || !keys?.p256dh || !keys?.auth) {
      return res.status(400).json({ error: 'Missing subscription fields' });
    }
    pushSubscriptionsDb.saveSubscription(req.user.id, endpoint, keys.p256dh, keys.auth);

    // Enable webPush in preferences so the confirmation goes through the full pipeline
    const currentPrefs = notificationPreferencesDb.getPreferences(req.user.id);
    if (!currentPrefs?.channels?.webPush) {
      notificationPreferencesDb.updatePreferences(req.user.id, {
        ...currentPrefs,
        channels: { ...currentPrefs?.channels, webPush: true },
      });
    }

    res.json({ success: true });

    // Send a confirmation push through the full notification pipeline.
    // This intentionally runs after the response has been sent.
    const event = createNotificationEvent({
      provider: 'system',
      kind: 'info',
      code: 'push.enabled',
      meta: { message: 'Push notifications are now enabled!' },
      severity: 'info'
    });
    notifyUserIfEnabled({ userId: req.user.id, event });
  } catch (error) {
    console.error('Error saving push subscription:', error);
    // If the failure happened after res.json() above (e.g. while building or
    // dispatching the confirmation event), the response is already finished;
    // writing again would raise ERR_HTTP_HEADERS_SENT.
    if (!res.headersSent) {
      res.status(500).json({ error: 'Failed to save push subscription' });
    }
  }
});
|
||||
|
||||
// Remove a web-push subscription and flip the webPush preference off so the
// stored preferences mirror the actual subscription state.
router.post('/unsubscribe', async (req, res) => {
  try {
    const { endpoint } = req.body;
    if (!endpoint) {
      return res.status(400).json({ error: 'Missing endpoint' });
    }
    pushSubscriptionsDb.removeSubscription(endpoint);

    const prefs = notificationPreferencesDb.getPreferences(req.user.id);
    if (prefs?.channels?.webPush) {
      notificationPreferencesDb.updatePreferences(req.user.id, {
        ...prefs,
        channels: { ...prefs.channels, webPush: false },
      });
    }

    res.json({ success: true });
  } catch (error) {
    console.error('Error removing push subscription:', error);
    res.status(500).json({ error: 'Failed to remove push subscription' });
  }
});
|
||||
|
||||
export default router;
|
||||
276
server/src/modules/settings/settings.routes.js
Normal file
276
server/src/modules/settings/settings.routes.js
Normal file
@@ -0,0 +1,276 @@
|
||||
import express from 'express';
|
||||
import { apiKeysDb, credentialsDb, notificationPreferencesDb, pushSubscriptionsDb } from '../../../database/db.js';
|
||||
import { getPublicKey } from '../../../services/vapid-keys.js';
|
||||
import { createNotificationEvent, notifyUserIfEnabled } from '../../../services/notification-orchestrator.js';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
// ===============================
|
||||
// API Keys Management
|
||||
// ===============================
|
||||
|
||||
// Get all API keys for the authenticated user
|
||||
// List the authenticated user's API keys with the key material truncated:
// only a short prefix of each key is exposed, the full value stays server-side.
router.get('/api-keys', async (req, res) => {
  try {
    const rows = apiKeysDb.getApiKeys(req.user.id);
    const apiKeys = rows.map((row) => ({
      ...row,
      api_key: `${row.api_key.substring(0, 10)}...`,
    }));
    res.json({ apiKeys });
  } catch (error) {
    console.error('Error fetching API keys:', error);
    res.status(500).json({ error: 'Failed to fetch API keys' });
  }
});
|
||||
|
||||
// Create a new API key
|
||||
// Create a new API key for the authenticated user.
router.post('/api-keys', async (req, res) => {
  try {
    const { keyName } = req.body;
    if (!keyName || keyName.trim() === '') {
      return res.status(400).json({ error: 'Key name is required' });
    }

    const apiKey = apiKeysDb.createApiKey(req.user.id, keyName.trim());
    res.json({ success: true, apiKey });
  } catch (error) {
    console.error('Error creating API key:', error);
    res.status(500).json({ error: 'Failed to create API key' });
  }
});
|
||||
|
||||
// Delete an API key
|
||||
// Delete an API key owned by the authenticated user.
router.delete('/api-keys/:keyId', async (req, res) => {
  try {
    // Radix 10 plus an explicit NaN check: reject malformed ids up front
    // instead of handing NaN to the database layer.
    const keyId = Number.parseInt(req.params.keyId, 10);
    if (Number.isNaN(keyId)) {
      return res.status(400).json({ error: 'keyId must be a number' });
    }

    const success = apiKeysDb.deleteApiKey(req.user.id, keyId);
    if (success) {
      res.json({ success: true });
    } else {
      res.status(404).json({ error: 'API key not found' });
    }
  } catch (error) {
    console.error('Error deleting API key:', error);
    res.status(500).json({ error: 'Failed to delete API key' });
  }
});
|
||||
|
||||
// Toggle API key active status
|
||||
// Enable or disable an API key without deleting it.
router.patch('/api-keys/:keyId/toggle', async (req, res) => {
  try {
    const { isActive } = req.body;
    if (typeof isActive !== 'boolean') {
      return res.status(400).json({ error: 'isActive must be a boolean' });
    }

    // Radix 10 plus an explicit NaN check: reject malformed ids up front
    // instead of handing NaN to the database layer.
    const keyId = Number.parseInt(req.params.keyId, 10);
    if (Number.isNaN(keyId)) {
      return res.status(400).json({ error: 'keyId must be a number' });
    }

    const success = apiKeysDb.toggleApiKey(req.user.id, keyId, isActive);
    if (success) {
      res.json({ success: true });
    } else {
      res.status(404).json({ error: 'API key not found' });
    }
  } catch (error) {
    console.error('Error toggling API key:', error);
    res.status(500).json({ error: 'Failed to toggle API key' });
  }
});
|
||||
|
||||
// ===============================
|
||||
// Generic Credentials Management
|
||||
// ===============================
|
||||
|
||||
// Get all credentials for the authenticated user (optionally filtered by type)
|
||||
// List the authenticated user's credentials, optionally filtered by type
// (?type=...). Credential values themselves are not sent back in this listing.
router.get('/credentials', async (req, res) => {
  try {
    const { type } = req.query;
    res.json({ credentials: credentialsDb.getCredentials(req.user.id, type || null) });
  } catch (error) {
    console.error('Error fetching credentials:', error);
    res.status(500).json({ error: 'Failed to fetch credentials' });
  }
});
|
||||
|
||||
// Create a new credential
|
||||
// Create a new stored credential (e.g. a GitHub token) for the user.
router.post('/credentials', async (req, res) => {
  try {
    const { credentialName, credentialType, credentialValue, description } = req.body;

    // Each required field must be a non-empty, non-blank string.
    const requiredFields = [
      [credentialName, 'Credential name is required'],
      [credentialType, 'Credential type is required'],
      [credentialValue, 'Credential value is required'],
    ];
    for (const [value, message] of requiredFields) {
      if (!value || !value.trim()) {
        return res.status(400).json({ error: message });
      }
    }

    const credential = credentialsDb.createCredential(
      req.user.id,
      credentialName.trim(),
      credentialType.trim(),
      credentialValue.trim(),
      description?.trim() || null
    );

    res.json({ success: true, credential });
  } catch (error) {
    console.error('Error creating credential:', error);
    res.status(500).json({ error: 'Failed to create credential' });
  }
});
|
||||
|
||||
// Delete a credential
|
||||
// Delete a credential owned by the authenticated user.
router.delete('/credentials/:credentialId', async (req, res) => {
  try {
    // Radix 10 plus an explicit NaN check: reject malformed ids up front
    // instead of handing NaN to the database layer.
    const credentialId = Number.parseInt(req.params.credentialId, 10);
    if (Number.isNaN(credentialId)) {
      return res.status(400).json({ error: 'credentialId must be a number' });
    }

    const success = credentialsDb.deleteCredential(req.user.id, credentialId);
    if (success) {
      res.json({ success: true });
    } else {
      res.status(404).json({ error: 'Credential not found' });
    }
  } catch (error) {
    console.error('Error deleting credential:', error);
    res.status(500).json({ error: 'Failed to delete credential' });
  }
});
|
||||
|
||||
// Toggle credential active status
|
||||
// Enable or disable a credential without deleting it.
router.patch('/credentials/:credentialId/toggle', async (req, res) => {
  try {
    const { isActive } = req.body;
    if (typeof isActive !== 'boolean') {
      return res.status(400).json({ error: 'isActive must be a boolean' });
    }

    // Radix 10 plus an explicit NaN check: reject malformed ids up front
    // instead of handing NaN to the database layer.
    const credentialId = Number.parseInt(req.params.credentialId, 10);
    if (Number.isNaN(credentialId)) {
      return res.status(400).json({ error: 'credentialId must be a number' });
    }

    const success = credentialsDb.toggleCredential(req.user.id, credentialId, isActive);
    if (success) {
      res.json({ success: true });
    } else {
      res.status(404).json({ error: 'Credential not found' });
    }
  } catch (error) {
    console.error('Error toggling credential:', error);
    res.status(500).json({ error: 'Failed to toggle credential' });
  }
});
|
||||
|
||||
// ===============================
|
||||
// Notification Preferences
|
||||
// ===============================
|
||||
|
||||
// Return the authenticated user's stored notification preferences.
router.get('/notification-preferences', async (req, res) => {
  try {
    res.json({
      success: true,
      preferences: notificationPreferencesDb.getPreferences(req.user.id),
    });
  } catch (error) {
    console.error('Error fetching notification preferences:', error);
    res.status(500).json({ error: 'Failed to fetch notification preferences' });
  }
});
|
||||
|
||||
// Persist the posted notification preferences and echo the stored result.
router.put('/notification-preferences', async (req, res) => {
  try {
    const updated = notificationPreferencesDb.updatePreferences(req.user.id, req.body || {});
    res.json({ success: true, preferences: updated });
  } catch (error) {
    console.error('Error saving notification preferences:', error);
    res.status(500).json({ error: 'Failed to save notification preferences' });
  }
});
|
||||
|
||||
// ===============================
|
||||
// Push Subscription Management
|
||||
// ===============================
|
||||
|
||||
// Expose the VAPID public key used by the browser to create push subscriptions.
router.get('/push/vapid-public-key', async (req, res) => {
  try {
    res.json({ publicKey: getPublicKey() });
  } catch (error) {
    console.error('Error fetching VAPID public key:', error);
    res.status(500).json({ error: 'Failed to fetch VAPID public key' });
  }
});
|
||||
|
||||
// Register a web-push subscription for the current user, enable the webPush
// channel in their preferences, and send a confirmation notification through
// the normal notification pipeline.
router.post('/push/subscribe', async (req, res) => {
  try {
    const { endpoint, keys } = req.body;
    if (!endpoint || !keys?.p256dh || !keys?.auth) {
      return res.status(400).json({ error: 'Missing subscription fields' });
    }
    pushSubscriptionsDb.saveSubscription(req.user.id, endpoint, keys.p256dh, keys.auth);

    // Enable webPush in preferences so the confirmation goes through the full pipeline
    const currentPrefs = notificationPreferencesDb.getPreferences(req.user.id);
    if (!currentPrefs?.channels?.webPush) {
      notificationPreferencesDb.updatePreferences(req.user.id, {
        ...currentPrefs,
        channels: { ...currentPrefs?.channels, webPush: true },
      });
    }

    res.json({ success: true });

    // Send a confirmation push through the full notification pipeline.
    // This intentionally runs after the response has been sent.
    const event = createNotificationEvent({
      provider: 'system',
      kind: 'info',
      code: 'push.enabled',
      meta: { message: 'Push notifications are now enabled!' },
      severity: 'info'
    });
    notifyUserIfEnabled({ userId: req.user.id, event });
  } catch (error) {
    console.error('Error saving push subscription:', error);
    // If the failure happened after res.json() above (e.g. while building or
    // dispatching the confirmation event), the response is already finished;
    // writing again would raise ERR_HTTP_HEADERS_SENT.
    if (!res.headersSent) {
      res.status(500).json({ error: 'Failed to save push subscription' });
    }
  }
});
|
||||
|
||||
// Remove a web-push subscription and flip the webPush preference off so the
// stored preferences mirror the actual subscription state.
router.post('/push/unsubscribe', async (req, res) => {
  try {
    const { endpoint } = req.body;
    if (!endpoint) {
      return res.status(400).json({ error: 'Missing endpoint' });
    }
    pushSubscriptionsDb.removeSubscription(endpoint);

    const prefs = notificationPreferencesDb.getPreferences(req.user.id);
    if (prefs?.channels?.webPush) {
      notificationPreferencesDb.updatePreferences(req.user.id, {
        ...prefs,
        channels: { ...prefs.channels, webPush: false },
      });
    }

    res.json({ success: true });
  } catch (error) {
    console.error('Error removing push subscription:', error);
    res.status(500).json({ error: 'Failed to remove push subscription' });
  }
});
|
||||
|
||||
export default router;
|
||||
1963
server/src/modules/taskmaster/taskmaster.routes.js
Normal file
1963
server/src/modules/taskmaster/taskmaster.routes.js
Normal file
File diff suppressed because it is too large
Load Diff
@@ -13,7 +13,7 @@ import { getConnectableHost } from '@/shared/utils/networkHosts.js';
|
||||
import { logger } from '@/shared/utils/logger.js';
|
||||
import { authRoutes } from '@/modules/auth/auth.routes.js';
|
||||
import { userRoutes } from '@/modules/user/user.routes.js';
|
||||
import { authenticateToken } from '@/modules/auth/auth.middleware.js';
|
||||
import { validateApiKey, authenticateToken } from '@/modules/auth/auth.middleware.js';
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = dirname(__filename);
|
||||
@@ -33,6 +33,52 @@ const HOST = process.env.HOST || '0.0.0.0';
|
||||
const DISPLAY_HOST = getConnectableHost(HOST);
|
||||
const VITE_PORT = process.env.VITE_PORT || 5173;
|
||||
|
||||
async function importRoute(relativePath: string): Promise<any> {
|
||||
const moduleUrl = new URL(relativePath, import.meta.url);
|
||||
const routeModule = await import(moduleUrl.href);
|
||||
return routeModule.default;
|
||||
}
|
||||
|
||||
const [
|
||||
gitRoutes,
|
||||
mcpRoutes,
|
||||
cursorRoutes,
|
||||
taskmasterRoutes,
|
||||
mcpUtilsRoutes,
|
||||
commandsRoutes,
|
||||
settingsRoutes,
|
||||
apiKeysRoutes,
|
||||
credentialsRoutes,
|
||||
notificationPreferencesRoutes,
|
||||
pushSubRoutes,
|
||||
agentRoutes,
|
||||
projectsRoutes,
|
||||
cliAuthRoutes,
|
||||
codexRoutes,
|
||||
geminiRoutes,
|
||||
pluginsRoutes,
|
||||
messagesRoutes,
|
||||
] = await Promise.all([
|
||||
importRoute('./modules/git/git.routes.js'),
|
||||
importRoute('./modules/mcp/mcp.routes.js'),
|
||||
importRoute('./modules/cursor/cursor.routes.js'),
|
||||
importRoute('./modules/taskmaster/taskmaster.routes.js'),
|
||||
importRoute('./modules/mcp-utils/mcp-utils.routes.js'),
|
||||
importRoute('./modules/commands/commands.routes.js'),
|
||||
importRoute('./modules/settings/settings.routes.js'),
|
||||
importRoute('./modules/api-keys/api-keys.routes.js'),
|
||||
importRoute('./modules/credentials/credentials.routes.js'),
|
||||
importRoute('./modules/notification-preferences/notification-preferences.routes.js'),
|
||||
importRoute('./modules/push-sub/push-sub.routes.js'),
|
||||
importRoute('./modules/agent/agent.routes.js'),
|
||||
importRoute('./modules/projects/projects.routes.js'),
|
||||
importRoute('./modules/cli-auth/cli-auth.routes.js'),
|
||||
importRoute('./modules/codex/codex.routes.js'),
|
||||
importRoute('./modules/gemini/gemini.routes.js'),
|
||||
importRoute('./modules/plugins/plugins.routes.js'),
|
||||
importRoute('./modules/messages/messages.routes.js'),
|
||||
]);
|
||||
|
||||
// ---------- MIDDLEWARES ----------------
|
||||
app.use(cors({ exposedHeaders: ['X-Refreshed-Token'] }));
|
||||
app.use(express.json({
|
||||
@@ -60,12 +106,63 @@ app.use((req, res, next) => {
|
||||
});
|
||||
|
||||
|
||||
// Optional API key validation (if configured)
|
||||
app.use('/api', validateApiKey);
|
||||
|
||||
// Authentication routes (public)
|
||||
app.use('/api/auth', authRoutes);
|
||||
|
||||
// Projects API Routes (protected)
|
||||
app.use('/api/projects', authenticateToken, projectsRoutes);
|
||||
|
||||
// Git API Routes (protected)
|
||||
app.use('/api/git', authenticateToken, gitRoutes);
|
||||
|
||||
// MCP API Routes (protected)
|
||||
app.use('/api/mcp', authenticateToken, mcpRoutes);
|
||||
|
||||
// Cursor API Routes (protected)
|
||||
app.use('/api/cursor', authenticateToken, cursorRoutes);
|
||||
|
||||
// TaskMaster API Routes (protected)
|
||||
app.use('/api/taskmaster', authenticateToken, taskmasterRoutes);
|
||||
|
||||
// MCP utilities
|
||||
app.use('/api/mcp-utils', authenticateToken, mcpUtilsRoutes);
|
||||
|
||||
// Commands API Routes (protected)
|
||||
app.use('/api/commands', authenticateToken, commandsRoutes);
|
||||
|
||||
// Settings API Routes (protected, legacy endpoint)
|
||||
app.use('/api/settings', authenticateToken, settingsRoutes);
|
||||
|
||||
// Settings sub-modules API Routes (protected)
|
||||
app.use('/api/api-keys', authenticateToken, apiKeysRoutes);
|
||||
app.use('/api/credentials', authenticateToken, credentialsRoutes);
|
||||
app.use('/api/notification-preferences', authenticateToken, notificationPreferencesRoutes);
|
||||
app.use('/api/push-sub', authenticateToken, pushSubRoutes);
|
||||
|
||||
// CLI Authentication API Routes (protected)
|
||||
app.use('/api/cli', authenticateToken, cliAuthRoutes);
|
||||
|
||||
// User API Routes (protected)
|
||||
app.use('/api/user', authenticateToken, userRoutes);
|
||||
|
||||
// Codex API Routes (protected)
|
||||
app.use('/api/codex', authenticateToken, codexRoutes);
|
||||
|
||||
// Gemini API Routes (protected)
|
||||
app.use('/api/gemini', authenticateToken, geminiRoutes);
|
||||
|
||||
// Plugins API Routes (protected)
|
||||
app.use('/api/plugins', authenticateToken, pluginsRoutes);
|
||||
|
||||
// Unified session messages route (protected)
|
||||
app.use('/api/sessions', authenticateToken, messagesRoutes);
|
||||
|
||||
// Agent API Routes (uses API key authentication)
|
||||
app.use('/api/agent', agentRoutes);
|
||||
|
||||
// This matches files found in the root public folder (like api-docs.html when we run `/api-docs.html`).
|
||||
// If the file is found, it's automatically sent. If it is not, it passes it to the next route checker.
|
||||
// This will run in production as well as development URLs.
|
||||
@@ -145,4 +242,4 @@ async function main() {
|
||||
}
|
||||
}
|
||||
|
||||
await main();
|
||||
await main();
|
||||
|
||||
@@ -66,7 +66,6 @@ export default function App() {
|
||||
// <Routes>
|
||||
// <Route path="/" element={<AppContent />} />
|
||||
// <Route path="/session/:sessionId" element={<AppContent />} />
|
||||
// <Route path='/' element={<SampleElement />} />
|
||||
// </Routes>
|
||||
// </Router>
|
||||
// </ProtectedRoute>
|
||||
|
||||
@@ -28,7 +28,7 @@ const parseJson = async <T>(response: Response): Promise<T> => {
|
||||
};
|
||||
|
||||
export const fetchGithubTokenCredentials = async () => {
|
||||
const response = await api.get('/settings/credentials?type=github_token');
|
||||
const response = await api.get('/credentials?type=github_token');
|
||||
const data = await parseJson<CredentialsResponse>(response);
|
||||
|
||||
if (!response.ok) {
|
||||
|
||||
@@ -43,8 +43,8 @@ export function useCredentialsSettings({
|
||||
setLoading(true);
|
||||
|
||||
const [apiKeysResponse, credentialsResponse] = await Promise.all([
|
||||
authenticatedFetch('/api/settings/api-keys'),
|
||||
authenticatedFetch('/api/settings/credentials?type=github_token'),
|
||||
authenticatedFetch('/api/api-keys'),
|
||||
authenticatedFetch('/api/credentials?type=github_token'),
|
||||
]);
|
||||
|
||||
const [apiKeysPayload, credentialsPayload] = await Promise.all([
|
||||
@@ -67,7 +67,7 @@ export function useCredentialsSettings({
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await authenticatedFetch('/api/settings/api-keys', {
|
||||
const response = await authenticatedFetch('/api/api-keys', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({ keyName: newKeyName.trim() }),
|
||||
});
|
||||
@@ -95,7 +95,7 @@ export function useCredentialsSettings({
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await authenticatedFetch(`/api/settings/api-keys/${keyId}`, {
|
||||
const response = await authenticatedFetch(`/api/api-keys/${keyId}`, {
|
||||
method: 'DELETE',
|
||||
});
|
||||
|
||||
@@ -113,7 +113,7 @@ export function useCredentialsSettings({
|
||||
|
||||
const toggleApiKey = useCallback(async (keyId: string, isActive: boolean) => {
|
||||
try {
|
||||
const response = await authenticatedFetch(`/api/settings/api-keys/${keyId}/toggle`, {
|
||||
const response = await authenticatedFetch(`/api/api-keys/${keyId}/toggle`, {
|
||||
method: 'PATCH',
|
||||
body: JSON.stringify({ isActive: !isActive }),
|
||||
});
|
||||
@@ -136,7 +136,7 @@ export function useCredentialsSettings({
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await authenticatedFetch('/api/settings/credentials', {
|
||||
const response = await authenticatedFetch('/api/credentials', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({
|
||||
credentialName: newGithubName.trim(),
|
||||
@@ -169,7 +169,7 @@ export function useCredentialsSettings({
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await authenticatedFetch(`/api/settings/credentials/${credentialId}`, {
|
||||
const response = await authenticatedFetch(`/api/credentials/${credentialId}`, {
|
||||
method: 'DELETE',
|
||||
});
|
||||
|
||||
@@ -187,7 +187,7 @@ export function useCredentialsSettings({
|
||||
|
||||
const toggleGithubCredential = useCallback(async (credentialId: string, isActive: boolean) => {
|
||||
try {
|
||||
const response = await authenticatedFetch(`/api/settings/credentials/${credentialId}/toggle`, {
|
||||
const response = await authenticatedFetch(`/api/credentials/${credentialId}/toggle`, {
|
||||
method: 'PATCH',
|
||||
body: JSON.stringify({ isActive: !isActive }),
|
||||
});
|
||||
|
||||
@@ -692,7 +692,7 @@ export function useSettingsController({ isOpen, initialTab, projects, onClose }:
|
||||
setGeminiPermissionMode(savedGeminiSettings.permissionMode || 'default');
|
||||
|
||||
try {
|
||||
const notificationResponse = await authenticatedFetch('/api/settings/notification-preferences');
|
||||
const notificationResponse = await authenticatedFetch('/api/notification-preferences');
|
||||
if (notificationResponse.ok) {
|
||||
const notificationData = await toResponseJson<NotificationPreferencesResponse>(notificationResponse);
|
||||
if (notificationData.success && notificationData.preferences) {
|
||||
@@ -767,7 +767,7 @@ export function useSettingsController({ isOpen, initialTab, projects, onClose }:
|
||||
lastUpdated: now,
|
||||
}));
|
||||
|
||||
const notificationResponse = await authenticatedFetch('/api/settings/notification-preferences', {
|
||||
const notificationResponse = await authenticatedFetch('/api/notification-preferences', {
|
||||
method: 'PUT',
|
||||
body: JSON.stringify(notificationPreferences),
|
||||
});
|
||||
|
||||
@@ -52,7 +52,7 @@ export function useWebPush(): WebPushState {
|
||||
setPermission(perm);
|
||||
if (perm !== 'granted') return;
|
||||
|
||||
const keyRes = await authenticatedFetch('/api/settings/push/vapid-public-key');
|
||||
const keyRes = await authenticatedFetch('/api/push-sub/vapid-public-key');
|
||||
const { publicKey } = await keyRes.json();
|
||||
|
||||
const registration = await navigator.serviceWorker.ready;
|
||||
@@ -62,7 +62,7 @@ export function useWebPush(): WebPushState {
|
||||
});
|
||||
|
||||
const subJson = subscription.toJSON();
|
||||
await authenticatedFetch('/api/settings/push/subscribe', {
|
||||
await authenticatedFetch('/api/push-sub/subscribe', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({
|
||||
endpoint: subJson.endpoint,
|
||||
@@ -86,7 +86,7 @@ export function useWebPush(): WebPushState {
|
||||
if (subscription) {
|
||||
const endpoint = subscription.endpoint;
|
||||
await subscription.unsubscribe();
|
||||
await authenticatedFetch('/api/settings/push/unsubscribe', {
|
||||
await authenticatedFetch('/api/push-sub/unsubscribe', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({ endpoint }),
|
||||
});
|
||||
|
||||
Reference in New Issue
Block a user