Mirror of https://github.com/siteboon/claudecodeui.git (synced 2025-12-09 03:39:37 +00:00)
feat(api): add API for one-shot prompt generation, API key authentication system, and git commit message generation
Implement comprehensive API key management functionality, including generation, validation, and CRUD operations.

Changes:
- Add API key database schema and operations (create, validate, delete, toggle)
- Commit message generation now works with the Claude SDK and Cursor CLI and returns a suggested commit message
- Implement crypto-based key generation with a 'ck_' prefix
- Add session ID tracking in claude-sdk.js and cursor-cli.js
- Update database layer with API key validation and last_used tracking
- Support multi-user API key management with user association

This enables secure programmatic access to the agent service.
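For orientation, a minimal sketch of how an external client could call the new agent endpoint with an API key. The endpoint path, the x-api-key header, and the request/response fields come from the diff below; the host, port, placeholder key, and repository URL are hypothetical (Node 18+, built-in fetch):

// call-agent.mjs - one-shot prompt against a cloned GitHub repo
const res = await fetch('http://localhost:3000/api/agent', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    'x-api-key': 'ck_your_generated_key_here' // key created via the settings API
  },
  body: JSON.stringify({
    githubUrl: 'https://github.com/your-org/your-repo', // hypothetical repo
    message: 'Summarize the project structure',
    provider: 'claude',
    stream: false,  // single JSON response instead of SSE
    cleanup: true   // remove the cloned project afterwards
  })
});
const result = await res.json();
console.log(result.sessionId, result.tokens, result.messages.length);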
@@ -378,6 +378,11 @@ async function queryClaudeSDK(command, options = {}, ws) {
        capturedSessionId = message.session_id;
        addSession(capturedSessionId, queryInstance, tempImagePaths, tempDir);

        // Set session ID on writer
        if (ws.setSessionId && typeof ws.setSessionId === 'function') {
          ws.setSessionId(capturedSessionId);
        }

        // Send session-created event only once for new sessions
        if (!sessionId && !sessionCreatedSent) {
          sessionCreatedSent = true;
@@ -94,6 +94,11 @@ async function spawnCursor(command, options = {}, ws) {
        activeCursorProcesses.set(capturedSessionId, cursorProcess);
      }

      // Set session ID on writer (for API endpoint compatibility)
      if (ws.setSessionId && typeof ws.setSessionId === 'function') {
        ws.setSessionId(capturedSessionId);
      }

      // Send session-created event only once for new sessions
      if (!sessionId && !sessionCreatedSent) {
        sessionCreatedSent = true;
@@ -1,6 +1,7 @@
import Database from 'better-sqlite3';
import path from 'path';
import fs from 'fs';
import crypto from 'crypto';
import { fileURLToPath } from 'url';
import { dirname } from 'path';
@@ -94,8 +95,169 @@ const userDb = {
  }
};

// API Keys database operations
const apiKeysDb = {
  // Generate a new API key
  generateApiKey: () => {
    return 'ck_' + crypto.randomBytes(32).toString('hex');
  },

  // Create a new API key
  createApiKey: (userId, keyName) => {
    try {
      const apiKey = apiKeysDb.generateApiKey();
      const stmt = db.prepare('INSERT INTO api_keys (user_id, key_name, api_key) VALUES (?, ?, ?)');
      const result = stmt.run(userId, keyName, apiKey);
      return { id: result.lastInsertRowid, keyName, apiKey };
    } catch (err) {
      throw err;
    }
  },

  // Get all API keys for a user
  getApiKeys: (userId) => {
    try {
      const rows = db.prepare('SELECT id, key_name, api_key, created_at, last_used, is_active FROM api_keys WHERE user_id = ? ORDER BY created_at DESC').all(userId);
      return rows;
    } catch (err) {
      throw err;
    }
  },

  // Validate API key and get user
  validateApiKey: (apiKey) => {
    try {
      const row = db.prepare(`
        SELECT u.id, u.username, ak.id as api_key_id
        FROM api_keys ak
        JOIN users u ON ak.user_id = u.id
        WHERE ak.api_key = ? AND ak.is_active = 1 AND u.is_active = 1
      `).get(apiKey);

      if (row) {
        // Update last_used timestamp
        db.prepare('UPDATE api_keys SET last_used = CURRENT_TIMESTAMP WHERE id = ?').run(row.api_key_id);
      }

      return row;
    } catch (err) {
      throw err;
    }
  },

  // Delete an API key
  deleteApiKey: (userId, apiKeyId) => {
    try {
      const stmt = db.prepare('DELETE FROM api_keys WHERE id = ? AND user_id = ?');
      const result = stmt.run(apiKeyId, userId);
      return result.changes > 0;
    } catch (err) {
      throw err;
    }
  },

  // Toggle API key active status
  toggleApiKey: (userId, apiKeyId, isActive) => {
    try {
      const stmt = db.prepare('UPDATE api_keys SET is_active = ? WHERE id = ? AND user_id = ?');
      const result = stmt.run(isActive ? 1 : 0, apiKeyId, userId);
      return result.changes > 0;
    } catch (err) {
      throw err;
    }
  }
};

// User credentials database operations (for GitHub tokens, GitLab tokens, etc.)
const credentialsDb = {
  // Create a new credential
  createCredential: (userId, credentialName, credentialType, credentialValue, description = null) => {
    try {
      const stmt = db.prepare('INSERT INTO user_credentials (user_id, credential_name, credential_type, credential_value, description) VALUES (?, ?, ?, ?, ?)');
      const result = stmt.run(userId, credentialName, credentialType, credentialValue, description);
      return { id: result.lastInsertRowid, credentialName, credentialType };
    } catch (err) {
      throw err;
    }
  },

  // Get all credentials for a user, optionally filtered by type
  getCredentials: (userId, credentialType = null) => {
    try {
      let query = 'SELECT id, credential_name, credential_type, description, created_at, is_active FROM user_credentials WHERE user_id = ?';
      const params = [userId];

      if (credentialType) {
        query += ' AND credential_type = ?';
        params.push(credentialType);
      }

      query += ' ORDER BY created_at DESC';

      const rows = db.prepare(query).all(...params);
      return rows;
    } catch (err) {
      throw err;
    }
  },

  // Get active credential value for a user by type (returns most recent active)
  getActiveCredential: (userId, credentialType) => {
    try {
      const row = db.prepare('SELECT credential_value FROM user_credentials WHERE user_id = ? AND credential_type = ? AND is_active = 1 ORDER BY created_at DESC LIMIT 1').get(userId, credentialType);
      return row?.credential_value || null;
    } catch (err) {
      throw err;
    }
  },

  // Delete a credential
  deleteCredential: (userId, credentialId) => {
    try {
      const stmt = db.prepare('DELETE FROM user_credentials WHERE id = ? AND user_id = ?');
      const result = stmt.run(credentialId, userId);
      return result.changes > 0;
    } catch (err) {
      throw err;
    }
  },

  // Toggle credential active status
  toggleCredential: (userId, credentialId, isActive) => {
    try {
      const stmt = db.prepare('UPDATE user_credentials SET is_active = ? WHERE id = ? AND user_id = ?');
      const result = stmt.run(isActive ? 1 : 0, credentialId, userId);
      return result.changes > 0;
    } catch (err) {
      throw err;
    }
  }
};

// Backward compatibility - keep old names pointing to new system
const githubTokensDb = {
  createGithubToken: (userId, tokenName, githubToken, description = null) => {
    return credentialsDb.createCredential(userId, tokenName, 'github_token', githubToken, description);
  },
  getGithubTokens: (userId) => {
    return credentialsDb.getCredentials(userId, 'github_token');
  },
  getActiveGithubToken: (userId) => {
    return credentialsDb.getActiveCredential(userId, 'github_token');
  },
  deleteGithubToken: (userId, tokenId) => {
    return credentialsDb.deleteCredential(userId, tokenId);
  },
  toggleGithubToken: (userId, tokenId, isActive) => {
    return credentialsDb.toggleCredential(userId, tokenId, isActive);
  }
};

export {
  db,
  initializeDatabase,
  userDb
  userDb,
  apiKeysDb,
  credentialsDb,
  githubTokensDb // Backward compatibility
};
@@ -13,4 +13,37 @@ CREATE TABLE IF NOT EXISTS users (

-- Indexes for performance
CREATE INDEX IF NOT EXISTS idx_users_username ON users(username);
CREATE INDEX IF NOT EXISTS idx_users_active ON users(is_active);
CREATE INDEX IF NOT EXISTS idx_users_active ON users(is_active);

-- API Keys table for external API access
CREATE TABLE IF NOT EXISTS api_keys (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    user_id INTEGER NOT NULL,
    key_name TEXT NOT NULL,
    api_key TEXT UNIQUE NOT NULL,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
    last_used DATETIME,
    is_active BOOLEAN DEFAULT 1,
    FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
);

CREATE INDEX IF NOT EXISTS idx_api_keys_key ON api_keys(api_key);
CREATE INDEX IF NOT EXISTS idx_api_keys_user_id ON api_keys(user_id);
CREATE INDEX IF NOT EXISTS idx_api_keys_active ON api_keys(is_active);

-- User credentials table for storing various tokens/credentials (GitHub, GitLab, etc.)
CREATE TABLE IF NOT EXISTS user_credentials (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    user_id INTEGER NOT NULL,
    credential_name TEXT NOT NULL,
    credential_type TEXT NOT NULL, -- 'github_token', 'gitlab_token', 'bitbucket_token', etc.
    credential_value TEXT NOT NULL,
    description TEXT,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
    is_active BOOLEAN DEFAULT 1,
    FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
);

CREATE INDEX IF NOT EXISTS idx_user_credentials_user_id ON user_credentials(user_id);
CREATE INDEX IF NOT EXISTS idx_user_credentials_type ON user_credentials(credential_type);
CREATE INDEX IF NOT EXISTS idx_user_credentials_active ON user_credentials(is_active);
@@ -47,6 +47,8 @@ import cursorRoutes from './routes/cursor.js';
import taskmasterRoutes from './routes/taskmaster.js';
import mcpUtilsRoutes from './routes/mcp-utils.js';
import commandsRoutes from './routes/commands.js';
import settingsRoutes from './routes/settings.js';
import agentRoutes from './routes/agent.js';
import { initializeDatabase } from './database/db.js';
import { validateApiKey, authenticateToken, authenticateWebSocket } from './middleware/auth.js';

@@ -196,6 +198,15 @@ app.use('/api/mcp-utils', authenticateToken, mcpUtilsRoutes);
// Commands API Routes (protected)
app.use('/api/commands', authenticateToken, commandsRoutes);

// Settings API Routes (protected)
app.use('/api/settings', authenticateToken, settingsRoutes);

// Agent API Routes (uses API key authentication)
app.use('/api/agent', agentRoutes);

// Serve public files (like api-docs.html)
app.use(express.static(path.join(__dirname, '../public')));

// Static files served after API routes
// Add cache control: HTML files should not be cached, but assets can be cached
app.use(express.static(path.join(__dirname, '../dist'), {
@@ -523,10 +523,12 @@ async function getProjects() {

async function getSessions(projectName, limit = 5, offset = 0) {
  const projectDir = path.join(process.env.HOME, '.claude', 'projects', projectName);

  try {
    const files = await fs.readdir(projectDir);
    const jsonlFiles = files.filter(file => file.endsWith('.jsonl'));
    // agent-*.jsonl files contain session start data at this point. This needs to be revisited
    // periodically to make sure only accurate data is there and no new functionality is added there
    const jsonlFiles = files.filter(file => file.endsWith('.jsonl') && !file.startsWith('agent-'));

    if (jsonlFiles.length === 0) {
      return { sessions: [], hasMore: false, total: 0 };

@@ -803,10 +805,12 @@ async function parseJsonlSessions(filePath) {
// Get messages for a specific session with pagination support
async function getSessionMessages(projectName, sessionId, limit = null, offset = 0) {
  const projectDir = path.join(process.env.HOME, '.claude', 'projects', projectName);

  try {
    const files = await fs.readdir(projectDir);
    const jsonlFiles = files.filter(file => file.endsWith('.jsonl'));
    // agent-*.jsonl files contain session start data at this point. This needs to be revisited
    // periodically to make sure only accurate data is there and no new functionality is added there
    const jsonlFiles = files.filter(file => file.endsWith('.jsonl') && !file.startsWith('agent-'));

    if (jsonlFiles.length === 0) {
      return { messages: [], total: 0, hasMore: false };
server/routes/agent.js (new file, 559 lines)
@@ -0,0 +1,559 @@
import express from 'express';
import { spawn } from 'child_process';
import path from 'path';
import os from 'os';
import { promises as fs } from 'fs';
import crypto from 'crypto';
import { apiKeysDb, githubTokensDb } from '../database/db.js';
import { addProjectManually } from '../projects.js';
import { queryClaudeSDK } from '../claude-sdk.js';
import { spawnCursor } from '../cursor-cli.js';

const router = express.Router();

// Middleware to validate API key for external requests
const validateExternalApiKey = (req, res, next) => {
  const apiKey = req.headers['x-api-key'] || req.query.apiKey;

  if (!apiKey) {
    return res.status(401).json({ error: 'API key required' });
  }

  const user = apiKeysDb.validateApiKey(apiKey);

  if (!user) {
    return res.status(401).json({ error: 'Invalid or inactive API key' });
  }

  req.user = user;
  next();
};

/**
 * Get the remote URL of a git repository
 * @param {string} repoPath - Path to the git repository
 * @returns {Promise<string>} - Remote URL of the repository
 */
async function getGitRemoteUrl(repoPath) {
  return new Promise((resolve, reject) => {
    const gitProcess = spawn('git', ['config', '--get', 'remote.origin.url'], {
      cwd: repoPath,
      stdio: ['pipe', 'pipe', 'pipe']
    });

    let stdout = '';
    let stderr = '';

    gitProcess.stdout.on('data', (data) => {
      stdout += data.toString();
    });

    gitProcess.stderr.on('data', (data) => {
      stderr += data.toString();
    });

    gitProcess.on('close', (code) => {
      if (code === 0) {
        resolve(stdout.trim());
      } else {
        reject(new Error(`Failed to get git remote: ${stderr}`));
      }
    });

    gitProcess.on('error', (error) => {
      reject(new Error(`Failed to execute git: ${error.message}`));
    });
  });
}

/**
 * Normalize GitHub URLs for comparison
 * @param {string} url - GitHub URL
 * @returns {string} - Normalized URL
 */
function normalizeGitHubUrl(url) {
  // Remove .git suffix
  let normalized = url.replace(/\.git$/, '');
  // Convert SSH to HTTPS format for comparison
  normalized = normalized.replace(/^git@github\.com:/, 'https://github.com/');
  // Remove trailing slash
  normalized = normalized.replace(/\/$/, '');
  return normalized.toLowerCase();
}

/**
 * Clone a GitHub repository to a directory
 * @param {string} githubUrl - GitHub repository URL
 * @param {string} githubToken - Optional GitHub token for private repos
 * @param {string} projectPath - Path for cloning the repository
 * @returns {Promise<string>} - Path to the cloned repository
 */
async function cloneGitHubRepo(githubUrl, githubToken = null, projectPath) {
  return new Promise(async (resolve, reject) => {
    try {
      // Validate GitHub URL
      if (!githubUrl || !githubUrl.includes('github.com')) {
        throw new Error('Invalid GitHub URL');
      }

      const cloneDir = path.resolve(projectPath);

      // Check if directory already exists
      try {
        await fs.access(cloneDir);
        // Directory exists - check if it's a git repo with the same URL
        try {
          const existingUrl = await getGitRemoteUrl(cloneDir);
          const normalizedExisting = normalizeGitHubUrl(existingUrl);
          const normalizedRequested = normalizeGitHubUrl(githubUrl);

          if (normalizedExisting === normalizedRequested) {
            console.log('✅ Repository already exists at path with correct URL');
            return resolve(cloneDir);
          } else {
            throw new Error(`Directory ${cloneDir} already exists with a different repository (${existingUrl}). Expected: ${githubUrl}`);
          }
        } catch (gitError) {
          throw new Error(`Directory ${cloneDir} already exists but is not a valid git repository or git command failed`);
        }
      } catch (accessError) {
        // Directory doesn't exist - proceed with clone
      }

      // Ensure parent directory exists
      await fs.mkdir(path.dirname(cloneDir), { recursive: true });

      // Prepare the git clone URL with authentication if token is provided
      let cloneUrl = githubUrl;
      if (githubToken) {
        // Convert HTTPS URL to authenticated URL
        // Example: https://github.com/user/repo -> https://token@github.com/user/repo
        cloneUrl = githubUrl.replace('https://github.com', `https://${githubToken}@github.com`);
      }

      console.log('🔄 Cloning repository:', githubUrl);
      console.log('📁 Destination:', cloneDir);

      // Execute git clone
      const gitProcess = spawn('git', ['clone', '--depth', '1', cloneUrl, cloneDir], {
        stdio: ['pipe', 'pipe', 'pipe']
      });

      let stdout = '';
      let stderr = '';

      gitProcess.stdout.on('data', (data) => {
        stdout += data.toString();
      });

      gitProcess.stderr.on('data', (data) => {
        stderr += data.toString();
        console.log('Git stderr:', data.toString());
      });

      gitProcess.on('close', (code) => {
        if (code === 0) {
          console.log('✅ Repository cloned successfully');
          resolve(cloneDir);
        } else {
          console.error('❌ Git clone failed:', stderr);
          reject(new Error(`Git clone failed: ${stderr}`));
        }
      });

      gitProcess.on('error', (error) => {
        reject(new Error(`Failed to execute git: ${error.message}`));
      });
    } catch (error) {
      reject(error);
    }
  });
}

/**
 * Clean up a temporary project directory and its Claude session
 * @param {string} projectPath - Path to the project directory
 * @param {string} sessionId - Session ID to clean up
 */
async function cleanupProject(projectPath, sessionId = null) {
  try {
    // Only clean up projects in the external-projects directory
    if (!projectPath.includes('.claude/external-projects')) {
      console.warn('⚠️ Refusing to clean up non-external project:', projectPath);
      return;
    }

    console.log('🧹 Cleaning up project:', projectPath);
    await fs.rm(projectPath, { recursive: true, force: true });
    console.log('✅ Project cleaned up');

    // Also clean up the Claude session directory if sessionId provided
    if (sessionId) {
      try {
        const sessionPath = path.join(os.homedir(), '.claude', 'sessions', sessionId);
        console.log('🧹 Cleaning up session directory:', sessionPath);
        await fs.rm(sessionPath, { recursive: true, force: true });
        console.log('✅ Session directory cleaned up');
      } catch (error) {
        console.error('⚠️ Failed to clean up session directory:', error.message);
      }
    }
  } catch (error) {
    console.error('❌ Failed to clean up project:', error);
  }
}

/**
 * SSE Stream Writer - Adapts SDK/CLI output to Server-Sent Events
 */
class SSEStreamWriter {
  constructor(res) {
    this.res = res;
    this.sessionId = null;
  }

  send(data) {
    if (this.res.writableEnded) {
      return;
    }

    // Format as SSE
    this.res.write(`data: ${JSON.stringify(data)}\n\n`);
  }

  end() {
    if (!this.res.writableEnded) {
      this.res.write('data: {"type":"done"}\n\n');
      this.res.end();
    }
  }

  setSessionId(sessionId) {
    this.sessionId = sessionId;
  }

  getSessionId() {
    return this.sessionId;
  }
}

/**
 * Non-streaming response collector
 */
class ResponseCollector {
  constructor() {
    this.messages = [];
    this.sessionId = null;
  }

  send(data) {
    // Store ALL messages for now - we'll filter when returning
    this.messages.push(data);

    // Extract sessionId if present
    if (typeof data === 'string') {
      try {
        const parsed = JSON.parse(data);
        if (parsed.sessionId) {
          this.sessionId = parsed.sessionId;
        }
      } catch (e) {
        // Not JSON, ignore
      }
    } else if (data && data.sessionId) {
      this.sessionId = data.sessionId;
    }
  }

  end() {
    // Do nothing - we'll collect all messages
  }

  setSessionId(sessionId) {
    this.sessionId = sessionId;
  }

  getSessionId() {
    return this.sessionId;
  }

  getMessages() {
    return this.messages;
  }

  /**
   * Get filtered assistant messages only
   */
  getAssistantMessages() {
    const assistantMessages = [];

    for (const msg of this.messages) {
      // Skip initial status message
      if (msg && msg.type === 'status') {
        continue;
      }

      // Handle JSON strings
      if (typeof msg === 'string') {
        try {
          const parsed = JSON.parse(msg);
          // Only include claude-response messages with assistant type
          if (parsed.type === 'claude-response' && parsed.data && parsed.data.type === 'assistant') {
            assistantMessages.push(parsed.data);
          }
        } catch (e) {
          // Not JSON, skip
        }
      }
    }

    return assistantMessages;
  }

  /**
   * Calculate total tokens from all messages
   */
  getTotalTokens() {
    let totalInput = 0;
    let totalOutput = 0;
    let totalCacheRead = 0;
    let totalCacheCreation = 0;

    for (const msg of this.messages) {
      let data = msg;

      // Parse if string
      if (typeof msg === 'string') {
        try {
          data = JSON.parse(msg);
        } catch (e) {
          continue;
        }
      }

      // Extract usage from claude-response messages
      if (data && data.type === 'claude-response' && data.data) {
        const msgData = data.data;
        if (msgData.message && msgData.message.usage) {
          const usage = msgData.message.usage;
          totalInput += usage.input_tokens || 0;
          totalOutput += usage.output_tokens || 0;
          totalCacheRead += usage.cache_read_input_tokens || 0;
          totalCacheCreation += usage.cache_creation_input_tokens || 0;
        }
      }
    }

    return {
      inputTokens: totalInput,
      outputTokens: totalOutput,
      cacheReadTokens: totalCacheRead,
      cacheCreationTokens: totalCacheCreation,
      totalTokens: totalInput + totalOutput + totalCacheRead + totalCacheCreation
    };
  }
}

// ===============================
// External API Endpoint
// ===============================

/**
 * POST /api/agent
 *
 * Trigger an AI agent (Claude or Cursor) to work on a project
 *
 * Body:
 * - githubUrl: string (conditionally required) - GitHub repository URL to clone
 * - projectPath: string (conditionally required) - Path to existing project or where to clone
 * - message: string (required) - Message to send to the AI agent
 * - provider: string (optional) - 'claude' or 'cursor' (default: 'claude')
 * - stream: boolean (optional) - Whether to stream responses (default: true)
 * - model: string (optional) - Model to use (for Cursor)
 * - cleanup: boolean (optional) - Whether to cleanup project after completion (default: true)
 * - githubToken: string (optional) - GitHub token for private repos (overrides stored token)
 *
 * Note: Either githubUrl OR projectPath must be provided. If both are provided, githubUrl will be cloned to projectPath.
 */
router.post('/', validateExternalApiKey, async (req, res) => {
  const { githubUrl, projectPath, message, provider = 'claude', model, githubToken } = req.body;

  // Parse stream and cleanup as booleans (handle string "true"/"false" from curl)
  const stream = req.body.stream === undefined ? true : (req.body.stream === true || req.body.stream === 'true');
  const cleanup = req.body.cleanup === undefined ? true : (req.body.cleanup === true || req.body.cleanup === 'true');

  // Validate inputs
  if (!githubUrl && !projectPath) {
    return res.status(400).json({ error: 'Either githubUrl or projectPath is required' });
  }

  if (!message || !message.trim()) {
    return res.status(400).json({ error: 'message is required' });
  }

  if (!['claude', 'cursor'].includes(provider)) {
    return res.status(400).json({ error: 'provider must be "claude" or "cursor"' });
  }

  let finalProjectPath = null;
  let writer = null;

  try {
    // Determine the final project path
    if (githubUrl) {
      // Clone repository (to projectPath if provided, otherwise generate path)
      const tokenToUse = githubToken || githubTokensDb.getActiveGithubToken(req.user.id);

      let targetPath;
      if (projectPath) {
        targetPath = projectPath;
      } else {
        // Generate a unique path for cloning
        const repoHash = crypto.createHash('md5').update(githubUrl + Date.now()).digest('hex');
        targetPath = path.join(os.homedir(), '.claude', 'external-projects', repoHash);
      }

      finalProjectPath = await cloneGitHubRepo(githubUrl.trim(), tokenToUse, targetPath);
    } else {
      // Use existing project path
      finalProjectPath = path.resolve(projectPath);

      // Verify the path exists
      try {
        await fs.access(finalProjectPath);
      } catch (error) {
        throw new Error(`Project path does not exist: ${finalProjectPath}`);
      }
    }

    // Register the project (or use existing registration)
    let project;
    try {
      project = await addProjectManually(finalProjectPath);
      console.log('📦 Project registered:', project);
    } catch (error) {
      // If project already exists, that's fine - continue with the existing registration
      if (error.message && error.message.includes('Project already configured')) {
        console.log('📦 Using existing project registration for:', finalProjectPath);
        project = { path: finalProjectPath };
      } else {
        throw error;
      }
    }

    // Set up writer based on streaming mode
    if (stream) {
      // Set up SSE headers for streaming
      res.setHeader('Content-Type', 'text/event-stream');
      res.setHeader('Cache-Control', 'no-cache');
      res.setHeader('Connection', 'keep-alive');
      res.setHeader('X-Accel-Buffering', 'no'); // Disable nginx buffering

      writer = new SSEStreamWriter(res);

      // Send initial status
      writer.send({
        type: 'status',
        message: githubUrl ? 'Repository cloned and session started' : 'Session started',
        projectPath: finalProjectPath
      });
    } else {
      // Non-streaming mode: collect messages
      writer = new ResponseCollector();

      // Collect initial status message
      writer.send({
        type: 'status',
        message: githubUrl ? 'Repository cloned and session started' : 'Session started',
        projectPath: finalProjectPath
      });
    }

    // Start the appropriate session
    if (provider === 'claude') {
      console.log('🤖 Starting Claude SDK session');

      await queryClaudeSDK(message.trim(), {
        projectPath: finalProjectPath,
        cwd: finalProjectPath,
        sessionId: null, // New session
        permissionMode: 'bypassPermissions' // Bypass all permissions for API calls
      }, writer);

    } else if (provider === 'cursor') {
      console.log('🖱️ Starting Cursor CLI session');

      await spawnCursor(message.trim(), {
        projectPath: finalProjectPath,
        cwd: finalProjectPath,
        sessionId: null, // New session
        model: model || undefined,
        skipPermissions: true // Bypass permissions for Cursor
      }, writer);
    }

    // Handle response based on streaming mode
    if (stream) {
      // Streaming mode: end the SSE stream
      writer.end();
    } else {
      // Non-streaming mode: send filtered messages and token summary as JSON
      const assistantMessages = writer.getAssistantMessages();
      const tokenSummary = writer.getTotalTokens();

      res.json({
        success: true,
        sessionId: writer.getSessionId(),
        messages: assistantMessages,
        tokens: tokenSummary,
        projectPath: finalProjectPath
      });
    }

    // Clean up if requested
    if (cleanup && githubUrl) {
      // Only cleanup if we cloned a repo (not for existing project paths)
      const sessionIdForCleanup = writer.getSessionId();
      setTimeout(() => {
        cleanupProject(finalProjectPath, sessionIdForCleanup);
      }, 5000);
    }

  } catch (error) {
    console.error('❌ External session error:', error);

    // Clean up on error
    if (finalProjectPath && cleanup && githubUrl) {
      const sessionIdForCleanup = writer ? writer.getSessionId() : null;
      cleanupProject(finalProjectPath, sessionIdForCleanup);
    }

    if (stream) {
      // For streaming, send error event and stop
      if (!writer) {
        // Set up SSE headers if not already done
        res.setHeader('Content-Type', 'text/event-stream');
        res.setHeader('Cache-Control', 'no-cache');
        res.setHeader('Connection', 'keep-alive');
        res.setHeader('X-Accel-Buffering', 'no');
        writer = new SSEStreamWriter(res);
      }

      if (!res.writableEnded) {
        writer.send({
          type: 'error',
          error: error.message,
          message: `Failed: ${error.message}`
        });
        writer.end();
      }
    } else if (!res.headersSent) {
      res.status(500).json({
        success: false,
        error: error.message
      });
    }
  }
});

export default router;
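Since streaming is the default, here is a rough sketch of consuming the SSE output of POST /api/agent from Node. The frame format ("data: <json>\n\n", terminated by {"type":"done"}) comes from SSEStreamWriter above; the host, port, key, and project path are hypothetical, and reading res.body with for await assumes Node 18+ where the fetch body is async-iterable:

const res = await fetch('http://localhost:3000/api/agent', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json', 'x-api-key': 'ck_your_generated_key_here' },
  body: JSON.stringify({
    projectPath: '/path/to/existing/project', // hypothetical local project
    message: 'Run a quick code review',
    stream: true
  })
});

// Each SSE frame is "data: <json>" followed by a blank line; the stream ends with {"type":"done"}.
const decoder = new TextDecoder();
let buffer = '';
for await (const chunk of res.body) {
  buffer += decoder.decode(chunk, { stream: true });
  let sep;
  while ((sep = buffer.indexOf('\n\n')) !== -1) {
    const frame = buffer.slice(0, sep);
    buffer = buffer.slice(sep + 2);
    if (frame.startsWith('data: ')) {
      const event = JSON.parse(frame.slice(6));
      if (event.type === 'done') process.exit(0);
      console.log(event.type); // e.g. 'status', 'claude-response', 'error'
    }
  }
}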
@@ -4,6 +4,8 @@ import { promisify } from 'util';
import path from 'path';
import { promises as fs } from 'fs';
import { extractProjectDirectory } from '../projects.js';
import { queryClaudeSDK } from '../claude-sdk.js';
import { spawnCursor } from '../cursor-cli.js';

const router = express.Router();
const execAsync = promisify(exec);
@@ -343,19 +345,24 @@ router.get('/commit-diff', async (req, res) => {
  }
});

// Generate commit message based on staged changes
// Generate commit message based on staged changes using AI
router.post('/generate-commit-message', async (req, res) => {
  const { project, files } = req.body;

  const { project, files, provider = 'claude' } = req.body;

  if (!project || !files || files.length === 0) {
    return res.status(400).json({ error: 'Project name and files are required' });
  }

  // Validate provider
  if (!['claude', 'cursor'].includes(provider)) {
    return res.status(400).json({ error: 'provider must be "claude" or "cursor"' });
  }

  try {
    const projectPath = await getActualProjectPath(project);

    // Get diff for selected files
    let combinedDiff = '';
    let diffContext = '';
    for (const file of files) {
      try {
        const { stdout } = await execAsync(
@@ -363,17 +370,30 @@ router.post('/generate-commit-message', async (req, res) => {
          { cwd: projectPath }
        );
        if (stdout) {
          combinedDiff += `\n--- ${file} ---\n${stdout}`;
          diffContext += `\n--- ${file} ---\n${stdout}`;
        }
      } catch (error) {
        console.error(`Error getting diff for ${file}:`, error);
      }
    }

    // Use AI to generate commit message (simple implementation)
    // In a real implementation, you might want to use GPT or Claude API
    const message = generateSimpleCommitMessage(files, combinedDiff);

    // If no diff found, might be untracked files
    if (!diffContext.trim()) {
      // Try to get content of untracked files
      for (const file of files) {
        try {
          const filePath = path.join(projectPath, file);
          const content = await fs.readFile(filePath, 'utf-8');
          diffContext += `\n--- ${file} (new file) ---\n${content.substring(0, 1000)}\n`;
        } catch (error) {
          console.error(`Error reading file ${file}:`, error);
        }
      }
    }

    // Generate commit message using AI
    const message = await generateCommitMessageWithAI(files, diffContext, provider, projectPath);

    res.json({ message });
  } catch (error) {
    console.error('Generate commit message error:', error);
@@ -381,46 +401,145 @@ router.post('/generate-commit-message', async (req, res) => {
  }
});

// Simple commit message generator (can be replaced with AI)
function generateSimpleCommitMessage(files, diff) {
  const fileCount = files.length;
  const isMultipleFiles = fileCount > 1;

  // Analyze the diff to determine the type of change
  const additions = (diff.match(/^\+[^+]/gm) || []).length;
  const deletions = (diff.match(/^-[^-]/gm) || []).length;

  // Determine the primary action
  let action = 'Update';
  if (additions > 0 && deletions === 0) {
    action = 'Add';
  } else if (deletions > 0 && additions === 0) {
    action = 'Remove';
  } else if (additions > deletions * 2) {
    action = 'Enhance';
  } else if (deletions > additions * 2) {
    action = 'Refactor';
  }

  // Generate message based on files
  if (isMultipleFiles) {
    const components = new Set(files.map(f => {
      const parts = f.split('/');
      return parts[parts.length - 2] || parts[0];
    }));

    if (components.size === 1) {
      return `${action} ${[...components][0]} component`;
    } else {
      return `${action} multiple components`;
/**
 * Generates a commit message using AI (Claude SDK or Cursor CLI)
 * @param {Array<string>} files - List of changed files
 * @param {string} diffContext - Git diff content
 * @param {string} provider - 'claude' or 'cursor'
 * @param {string} projectPath - Project directory path
 * @returns {Promise<string>} Generated commit message
 */
async function generateCommitMessageWithAI(files, diffContext, provider, projectPath) {
  // Create the prompt
  const prompt = `You are a git commit message generator. Based on the following file changes and diffs, generate a commit message in conventional commit format.

REQUIREMENTS:
- Use conventional commit format: type(scope): subject
- Include a body that explains what changed and why
- Valid types: feat, fix, docs, style, refactor, perf, test, build, ci, chore
- Keep subject line under 50 characters
- Wrap body at 72 characters
- Be specific and descriptive
- Return ONLY the commit message, nothing else - no markdown, no explanations, no code blocks

FILES CHANGED:
${files.map(f => `- ${f}`).join('\n')}

DIFFS:
${diffContext.substring(0, 4000)}

Generate the commit message now:`;

  try {
    // Create a simple writer that collects the response
    let responseText = '';
    const writer = {
      send: (data) => {
        try {
          const parsed = typeof data === 'string' ? JSON.parse(data) : data;
          console.log('🔍 Writer received message type:', parsed.type);

          // Handle different message formats from Claude SDK and Cursor CLI
          // Claude SDK sends: {type: 'claude-response', data: {message: {content: [...]}}}
          if (parsed.type === 'claude-response' && parsed.data) {
            const message = parsed.data.message || parsed.data;
            console.log('📦 Claude response message:', JSON.stringify(message, null, 2).substring(0, 500));
            if (message.content && Array.isArray(message.content)) {
              // Extract text from content array
              for (const item of message.content) {
                if (item.type === 'text' && item.text) {
                  console.log('✅ Extracted text chunk:', item.text.substring(0, 100));
                  responseText += item.text;
                }
              }
            }
          }
          // Cursor CLI sends: {type: 'cursor-output', output: '...'}
          else if (parsed.type === 'cursor-output' && parsed.output) {
            console.log('✅ Cursor output:', parsed.output.substring(0, 100));
            responseText += parsed.output;
          }
          // Also handle direct text messages
          else if (parsed.type === 'text' && parsed.text) {
            console.log('✅ Direct text:', parsed.text.substring(0, 100));
            responseText += parsed.text;
          }
        } catch (e) {
          // Ignore parse errors
          console.error('Error parsing writer data:', e);
        }
      },
      setSessionId: () => {}, // No-op for this use case
    };

    console.log('🚀 Calling AI agent with provider:', provider);
    console.log('📝 Prompt length:', prompt.length);

    // Call the appropriate agent
    if (provider === 'claude') {
      await queryClaudeSDK(prompt, {
        cwd: projectPath,
        permissionMode: 'bypassPermissions',
        model: 'sonnet'
      }, writer);
    } else if (provider === 'cursor') {
      await spawnCursor(prompt, {
        cwd: projectPath,
        skipPermissions: true
      }, writer);
    }
  } else {
    const fileName = files[0].split('/').pop();
    const componentName = fileName.replace(/\.(jsx?|tsx?|css|scss)$/, '');
    return `${action} ${componentName}`;

    console.log('📊 Total response text collected:', responseText.length, 'characters');
    console.log('📄 Response preview:', responseText.substring(0, 200));

    // Clean up the response
    const cleanedMessage = cleanCommitMessage(responseText);
    console.log('🧹 Cleaned message:', cleanedMessage.substring(0, 200));

    return cleanedMessage || 'chore: update files';
  } catch (error) {
    console.error('Error generating commit message with AI:', error);
    // Fallback to simple message
    return `chore: update ${files.length} file${files.length !== 1 ? 's' : ''}`;
  }
}

/**
 * Cleans the AI-generated commit message by removing markdown, code blocks, and extra formatting
 * @param {string} text - Raw AI response
 * @returns {string} Clean commit message
 */
function cleanCommitMessage(text) {
  if (!text || !text.trim()) {
    return '';
  }

  let cleaned = text.trim();

  // Remove markdown code blocks
  cleaned = cleaned.replace(/```[a-z]*\n/g, '');
  cleaned = cleaned.replace(/```/g, '');

  // Remove markdown headers
  cleaned = cleaned.replace(/^#+\s*/gm, '');

  // Remove leading/trailing quotes
  cleaned = cleaned.replace(/^["']|["']$/g, '');

  // If there are multiple lines, take everything (subject + body)
  // Just clean up extra blank lines
  cleaned = cleaned.replace(/\n{3,}/g, '\n\n');

  // Remove any explanatory text before the actual commit message
  // Look for conventional commit pattern and start from there
  const conventionalCommitMatch = cleaned.match(/(feat|fix|docs|style|refactor|perf|test|build|ci|chore)(\(.+?\))?:.+/s);
  if (conventionalCommitMatch) {
    cleaned = cleaned.substring(cleaned.indexOf(conventionalCommitMatch[0]));
  }

  return cleaned.trim();
}

// Get remote status (ahead/behind commits with smart remote detection)
router.get('/remote-status', async (req, res) => {
  const { project } = req.query;
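The reworked commit-message route accepts an optional provider and responds with { message }. A small sketch of calling it from the authenticated frontend; the /api/git mount path, the Bearer token handling, and the project/file names are assumptions based on the rest of the project and are not shown in this diff:

const res = await fetch('/api/git/generate-commit-message', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    'Authorization': `Bearer ${jwtToken}` // assumed JWT from the normal login flow
  },
  body: JSON.stringify({
    project: 'my-project',                  // project name as registered in the UI (hypothetical)
    files: ['src/App.jsx', 'src/api.js'],   // staged files to diff (hypothetical)
    provider: 'claude'                      // or 'cursor'
  })
});
const { message } = await res.json();
console.log(message); // expected to be in conventional commit format, e.g. "feat(api): ..."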
server/routes/settings.js (new file, 178 lines)
@@ -0,0 +1,178 @@
import express from 'express';
import { apiKeysDb, credentialsDb } from '../database/db.js';

const router = express.Router();

// ===============================
// API Keys Management
// ===============================

// Get all API keys for the authenticated user
router.get('/api-keys', async (req, res) => {
  try {
    const apiKeys = apiKeysDb.getApiKeys(req.user.id);
    // Don't send the full API key in the list for security
    const sanitizedKeys = apiKeys.map(key => ({
      ...key,
      api_key: key.api_key.substring(0, 10) + '...'
    }));
    res.json({ apiKeys: sanitizedKeys });
  } catch (error) {
    console.error('Error fetching API keys:', error);
    res.status(500).json({ error: 'Failed to fetch API keys' });
  }
});

// Create a new API key
router.post('/api-keys', async (req, res) => {
  try {
    const { keyName } = req.body;

    if (!keyName || !keyName.trim()) {
      return res.status(400).json({ error: 'Key name is required' });
    }

    const result = apiKeysDb.createApiKey(req.user.id, keyName.trim());
    res.json({
      success: true,
      apiKey: result
    });
  } catch (error) {
    console.error('Error creating API key:', error);
    res.status(500).json({ error: 'Failed to create API key' });
  }
});

// Delete an API key
router.delete('/api-keys/:keyId', async (req, res) => {
  try {
    const { keyId } = req.params;
    const success = apiKeysDb.deleteApiKey(req.user.id, parseInt(keyId));

    if (success) {
      res.json({ success: true });
    } else {
      res.status(404).json({ error: 'API key not found' });
    }
  } catch (error) {
    console.error('Error deleting API key:', error);
    res.status(500).json({ error: 'Failed to delete API key' });
  }
});

// Toggle API key active status
router.patch('/api-keys/:keyId/toggle', async (req, res) => {
  try {
    const { keyId } = req.params;
    const { isActive } = req.body;

    if (typeof isActive !== 'boolean') {
      return res.status(400).json({ error: 'isActive must be a boolean' });
    }

    const success = apiKeysDb.toggleApiKey(req.user.id, parseInt(keyId), isActive);

    if (success) {
      res.json({ success: true });
    } else {
      res.status(404).json({ error: 'API key not found' });
    }
  } catch (error) {
    console.error('Error toggling API key:', error);
    res.status(500).json({ error: 'Failed to toggle API key' });
  }
});

// ===============================
// Generic Credentials Management
// ===============================

// Get all credentials for the authenticated user (optionally filtered by type)
router.get('/credentials', async (req, res) => {
  try {
    const { type } = req.query;
    const credentials = credentialsDb.getCredentials(req.user.id, type || null);
    // Don't send the actual credential values for security
    res.json({ credentials });
  } catch (error) {
    console.error('Error fetching credentials:', error);
    res.status(500).json({ error: 'Failed to fetch credentials' });
  }
});

// Create a new credential
router.post('/credentials', async (req, res) => {
  try {
    const { credentialName, credentialType, credentialValue, description } = req.body;

    if (!credentialName || !credentialName.trim()) {
      return res.status(400).json({ error: 'Credential name is required' });
    }

    if (!credentialType || !credentialType.trim()) {
      return res.status(400).json({ error: 'Credential type is required' });
    }

    if (!credentialValue || !credentialValue.trim()) {
      return res.status(400).json({ error: 'Credential value is required' });
    }

    const result = credentialsDb.createCredential(
      req.user.id,
      credentialName.trim(),
      credentialType.trim(),
      credentialValue.trim(),
      description?.trim() || null
    );

    res.json({
      success: true,
      credential: result
    });
  } catch (error) {
    console.error('Error creating credential:', error);
    res.status(500).json({ error: 'Failed to create credential' });
  }
});

// Delete a credential
router.delete('/credentials/:credentialId', async (req, res) => {
  try {
    const { credentialId } = req.params;
    const success = credentialsDb.deleteCredential(req.user.id, parseInt(credentialId));

    if (success) {
      res.json({ success: true });
    } else {
      res.status(404).json({ error: 'Credential not found' });
    }
  } catch (error) {
    console.error('Error deleting credential:', error);
    res.status(500).json({ error: 'Failed to delete credential' });
  }
});

// Toggle credential active status
router.patch('/credentials/:credentialId/toggle', async (req, res) => {
  try {
    const { credentialId } = req.params;
    const { isActive } = req.body;

    if (typeof isActive !== 'boolean') {
      return res.status(400).json({ error: 'isActive must be a boolean' });
    }

    const success = credentialsDb.toggleCredential(req.user.id, parseInt(credentialId), isActive);

    if (success) {
      res.json({ success: true });
    } else {
      res.status(404).json({ error: 'Credential not found' });
    }
  } catch (error) {
    console.error('Error toggling credential:', error);
    res.status(500).json({ error: 'Failed to toggle credential' });
  }
});

export default router;
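The settings routes are mounted at /api/settings behind the existing JWT middleware (see the index.js hunk above), so API keys are created from the logged-in UI session. A minimal sketch; the jwtToken variable and the key name are illustrative:

const headers = {
  'Content-Type': 'application/json',
  'Authorization': `Bearer ${jwtToken}` // assumed JWT from the normal login flow
};

// Create a key; the full 'ck_...' value is only returned at creation time
const created = await fetch('/api/settings/api-keys', {
  method: 'POST',
  headers,
  body: JSON.stringify({ keyName: 'ci-bot' })
}).then(r => r.json());
console.log(created.apiKey.apiKey); // store this securely; later listings only show a truncated prefix

// List keys (api_key values are truncated to the first 10 characters plus '...')
const { apiKeys } = await fetch('/api/settings/api-keys', { headers }).then(r => r.json());
console.log(apiKeys.map(k => k.api_key));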