From f77301e844e98f842077a5cab7caa10eb9ead417 Mon Sep 17 00:00:00 2001 From: Haileyesus Date: Fri, 27 Mar 2026 18:26:30 +0300 Subject: [PATCH] refactor(backend): move every route to its own module --- server/src/modules/agent/agent.routes.js | 1246 +++++++++++ .../src/modules/api-keys/api-keys.routes.js | 86 + .../src/modules/cli-auth/cli-auth.routes.js | 434 ++++ server/src/modules/codex/codex.routes.js | 329 +++ .../src/modules/commands/commands.routes.js | 601 +++++ .../modules/credentials/credentials.routes.js | 98 + server/src/modules/cursor/cursor.routes.js | 798 +++++++ server/src/modules/gemini/gemini.routes.js | 24 + server/src/modules/git/git.routes.js | 1488 +++++++++++++ .../src/modules/mcp-utils/mcp-utils.routes.js | 48 + server/src/modules/mcp/mcp.routes.js | 552 +++++ .../src/modules/messages/messages.routes.js | 61 + .../notification-preferences.routes.js | 30 + server/src/modules/plugins/plugins.routes.js | 307 +++ .../src/modules/projects/projects.routes.js | 548 +++++ .../src/modules/push-sub/push-sub.routes.js | 80 + .../src/modules/settings/settings.routes.js | 276 +++ .../modules/taskmaster/taskmaster.routes.js | 1963 +++++++++++++++++ server/src/runner.ts | 101 +- src/App.tsx | 1 - .../data/workspaceApi.ts | 2 +- .../settings/hooks/useCredentialsSettings.ts | 16 +- .../settings/hooks/useSettingsController.ts | 4 +- src/hooks/useWebPush.ts | 6 +- 24 files changed, 9082 insertions(+), 17 deletions(-) create mode 100644 server/src/modules/agent/agent.routes.js create mode 100644 server/src/modules/api-keys/api-keys.routes.js create mode 100644 server/src/modules/cli-auth/cli-auth.routes.js create mode 100644 server/src/modules/codex/codex.routes.js create mode 100644 server/src/modules/commands/commands.routes.js create mode 100644 server/src/modules/credentials/credentials.routes.js create mode 100644 server/src/modules/cursor/cursor.routes.js create mode 100644 server/src/modules/gemini/gemini.routes.js create mode 100644 
server/src/modules/git/git.routes.js create mode 100644 server/src/modules/mcp-utils/mcp-utils.routes.js create mode 100644 server/src/modules/mcp/mcp.routes.js create mode 100644 server/src/modules/messages/messages.routes.js create mode 100644 server/src/modules/notification-preferences/notification-preferences.routes.js create mode 100644 server/src/modules/plugins/plugins.routes.js create mode 100644 server/src/modules/projects/projects.routes.js create mode 100644 server/src/modules/push-sub/push-sub.routes.js create mode 100644 server/src/modules/settings/settings.routes.js create mode 100644 server/src/modules/taskmaster/taskmaster.routes.js diff --git a/server/src/modules/agent/agent.routes.js b/server/src/modules/agent/agent.routes.js new file mode 100644 index 00000000..40eb9487 --- /dev/null +++ b/server/src/modules/agent/agent.routes.js @@ -0,0 +1,1246 @@ +import express from 'express'; +import { spawn } from 'child_process'; +import path from 'path'; +import os from 'os'; +import { promises as fs } from 'fs'; +import crypto from 'crypto'; +import { userDb, apiKeysDb, githubTokensDb } from '../../../database/db.js'; +import { addProjectManually } from '../../../projects.js'; +import { queryClaudeSDK } from '../../../claude-sdk.js'; +import { spawnCursor } from '../../../cursor-cli.js'; +import { queryCodex } from '../../../openai-codex.js'; +import { spawnGemini } from '../../../gemini-cli.js'; +import { Octokit } from '@octokit/rest'; +import { CLAUDE_MODELS, CURSOR_MODELS, CODEX_MODELS } from '../../../../shared/modelConstants.js'; +import { IS_PLATFORM } from '../../../constants/config.js'; + +const router = express.Router(); + +/** + * Middleware to authenticate agent API requests. + * + * Supports two authentication modes: + * 1. Platform mode (IS_PLATFORM=true): For managed/hosted deployments where + * authentication is handled by an external proxy. Requests are trusted and + * the default user context is used. + * + * 2. 
/* API key mode (default): For self-hosted deployments where users authenticate
 * via API keys created in the UI. Keys are validated against the local database.
 */
const validateExternalApiKey = (req, res, next) => {
  // Platform mode: authentication is handled by an external proxy layer, so
  // the request is trusted and the default (first) user context is attached.
  if (IS_PLATFORM) {
    try {
      const platformUser = userDb.getFirstUser();
      if (!platformUser) {
        return res.status(500).json({ error: 'Platform mode: No user found in database' });
      }
      req.user = platformUser;
      return next();
    } catch (error) {
      console.error('Platform mode error:', error);
      return res.status(500).json({ error: 'Platform mode: Failed to fetch user' });
    }
  }

  // Self-hosted mode: the key may arrive as a header or a query parameter.
  const providedKey = req.headers['x-api-key'] || req.query.apiKey;

  if (!providedKey) {
    return res.status(401).json({ error: 'API key required' });
  }

  const keyOwner = apiKeysDb.validateApiKey(providedKey);

  if (!keyOwner) {
    return res.status(401).json({ error: 'Invalid or inactive API key' });
  }

  req.user = keyOwner;
  next();
};

/**
 * Look up the `origin` remote URL of a git repository by shelling out to
 * `git config --get remote.origin.url`.
 * @param {string} repoPath - Path to the git repository
 * @returns {Promise<string>} Trimmed remote URL of the repository
 */
function getGitRemoteUrl(repoPath) {
  return new Promise((resolve, reject) => {
    const child = spawn('git', ['config', '--get', 'remote.origin.url'], {
      cwd: repoPath,
      stdio: ['pipe', 'pipe', 'pipe']
    });

    let output = '';
    let errOutput = '';

    child.stdout.on('data', (chunk) => {
      output += chunk.toString();
    });

    child.stderr.on('data', (chunk) => {
      errOutput += chunk.toString();
    });

    child.on('close', (exitCode) => {
      if (exitCode === 0) {
        resolve(output.trim());
      } else {
        reject(new Error(`Failed to get git remote: ${errOutput}`));
      }
    });

    child.on('error', (err) => {
      reject(new Error(`Failed to execute git: ${err.message}`));
    });
  });
}

/**
 * Normalize GitHub URLs so HTTPS/SSH variants of the same repo compare equal.
 * @param {string} url - GitHub URL
 * @returns {string} Lowercased HTTPS-style URL without .git suffix or trailing slash
 */
function normalizeGitHubUrl(url) {
  return url
    .replace(/\.git$/, '')                               // drop .git suffix
    .replace(/^git@github\.com:/, 'https://github.com/') // SSH -> HTTPS form for comparison
    .replace(/\/$/, '')                                  // drop trailing slash
    .toLowerCase();
}

/**
 * Parse a GitHub URL to extract owner and repo.
 * Accepts HTTPS (https://github.com/owner/repo[.git]) and SSH
 * (git@github.com:owner/repo[.git]) forms; trailing slashes are now tolerated
 * (e.g. https://github.com/owner/repo/), which the previous regex rejected.
 * @param {string} url - GitHub URL (HTTPS or SSH)
 * @returns {{owner: string, repo: string}} Parsed owner and repo
 * @throws {Error} If the URL does not look like a GitHub repository URL
 */
function parseGitHubUrl(url) {
  // Strip trailing slashes first so copy-pasted browser URLs parse too.
  const trimmed = url.replace(/\/+$/, '');
  const match = trimmed.match(/github\.com[:/]([^/]+)\/([^/]+?)(?:\.git)?$/);
  if (!match) {
    throw new Error('Invalid GitHub URL format');
  }
  return {
    owner: match[1],
    repo: match[2].replace(/\.git$/, '')
  };
}

/**
 * Auto-generate a git-safe branch name from a free-form task message.
 * Output shape: `<slug>-<ts>` where <slug> is a kebab-case slug of the message
 * and <ts> is the last 6 base36 digits of Date.now(); total length <= 50.
 * @param {string} message - The agent message
 * @returns {string} Generated branch name
 */
function autogenerateBranchName(message) {
  const timestamp = Date.now().toString(36).slice(-6);
  const suffix = `-${timestamp}`;

  // Slugify: lowercase, drop anything but [a-z0-9 space -], collapse runs of
  // whitespace/hyphens into single hyphens, trim hyphens at both ends.
  let slug = message
    .toLowerCase()
    .replace(/[^a-z0-9\s-]/g, '')
    .replace(/\s+/g, '-')
    .replace(/-+/g, '-')
    .replace(/^-|-$/g, '');

  // Non-empty fallback for messages with no usable characters.
  if (!slug) {
    slug = 'task';
  }

  // Truncate so slug + suffix fits within 50 characters, then re-trim hyphens
  // that truncation may have exposed at either end.
  const maxBaseLength = 50 - suffix.length;
  if (slug.length > maxBaseLength) {
    slug = slug.substring(0, maxBaseLength);
  }
  slug = slug.replace(/-$/, '').replace(/^-+/, '');

  if (!slug || slug.startsWith('-')) {
    slug = 'task';
  }

  const candidate = `${slug}${suffix}`;

  // Defensive final check: fall back to a deterministic safe name if the
  // result somehow violates the kebab-case pattern.
  if (!/^[a-z0-9]+(?:-[a-z0-9]+)*$/.test(candidate)) {
    return `branch-${timestamp}`;
  }

  return candidate;
}

/**
 * Validate a proposed Git branch name against git's ref-name rules.
 * @param {string} branchName - Branch name to validate
 * @returns {{valid: boolean, error?: string}} `valid: true`, or `valid: false`
 *   plus the message for the first rule violated
 */
function validateBranchName(branchName) {
  if (!branchName || branchName.trim() === '') {
    return { valid: false, error: 'Branch name cannot be empty' };
  }

  // Git branch name rules, checked in order; the first violation wins.
  const invalidPatterns = [
    { pattern: /^\./, message: 'Branch name cannot start with a dot' },
    { pattern: /\.$/, message: 'Branch name cannot end with a dot' },
    { pattern: /\.\./, message: 'Branch name cannot contain consecutive dots (..)' },
    { pattern: /\s/, message: 'Branch name cannot contain spaces' },
    { pattern: /[~^:?*\[\\]/, message: 'Branch name cannot contain special characters: ~ ^ : ? * [ \\' },
    { pattern: /@{/, message: 'Branch name cannot contain @{' },
    { pattern: /\/$/, message: 'Branch name cannot end with a slash' },
    { pattern: /^\//, message: 'Branch name cannot start with a slash' },
    { pattern: /\/\//, message: 'Branch name cannot contain consecutive slashes' },
    { pattern: /\.lock$/, message: 'Branch name cannot end with .lock' }
  ];

  const violated = invalidPatterns.find(({ pattern }) => pattern.test(branchName));
  if (violated) {
    return { valid: false, error: violated.message };
  }

  // Reject ASCII control characters (0x00-0x1F, 0x7F).
  if (/[\x00-\x1F\x7F]/.test(branchName)) {
    return { valid: false, error: 'Branch name cannot contain control characters' };
  }

  return { valid: true };
}

/**
 * Read the most recent commit subject lines from a repository via `git log`.
 * @param {string} projectPath - Path to the git repository
 * @param {number} [limit=5] - Number of commits to retrieve
 * @returns {Promise<string[]>} Commit subject lines, newest first
 */
function getCommitMessages(projectPath, limit = 5) {
  return new Promise((resolve, reject) => {
    const child = spawn('git', ['log', `-${limit}`, '--pretty=format:%s'], {
      cwd: projectPath,
      stdio: ['pipe', 'pipe', 'pipe']
    });

    let output = '';
    let errOutput = '';

    child.stdout.on('data', (chunk) => {
      output += chunk.toString();
    });

    child.stderr.on('data', (chunk) => {
      errOutput += chunk.toString();
    });

    child.on('close', (exitCode) => {
      if (exitCode === 0) {
        resolve(output.trim().split('\n').filter((msg) => msg.length > 0));
      } else {
        reject(new Error(`Failed to get commit messages: ${errOutput}`));
      }
    });

    child.on('error', (err) => {
      reject(new Error(`Failed to execute git: ${err.message}`));
    });
  });
}
of the new branch + * @param {string} baseBranch - Base branch to branch from (default: 'main') + * @returns {Promise} + */ +async function createGitHubBranch(octokit, owner, repo, branchName, baseBranch = 'main') { + try { + // Get the SHA of the base branch + const { data: ref } = await octokit.git.getRef({ + owner, + repo, + ref: `heads/${baseBranch}` + }); + + const baseSha = ref.object.sha; + + // Create the new branch + await octokit.git.createRef({ + owner, + repo, + ref: `refs/heads/${branchName}`, + sha: baseSha + }); + + console.log(`✅ Created branch '${branchName}' on GitHub`); + } catch (error) { + if (error.status === 422 && error.message.includes('Reference already exists')) { + console.log(`ℹ️ Branch '${branchName}' already exists on GitHub`); + } else { + throw error; + } + } +} + +/** + * Create a pull request on GitHub + * @param {Octokit} octokit - Octokit instance + * @param {string} owner - Repository owner + * @param {string} repo - Repository name + * @param {string} branchName - Head branch name + * @param {string} title - PR title + * @param {string} body - PR body/description + * @param {string} baseBranch - Base branch (default: 'main') + * @returns {Promise<{number: number, url: string}>} - PR number and URL + */ +async function createGitHubPR(octokit, owner, repo, branchName, title, body, baseBranch = 'main') { + const { data: pr } = await octokit.pulls.create({ + owner, + repo, + title, + head: branchName, + base: baseBranch, + body + }); + + console.log(`✅ Created pull request #${pr.number}: ${pr.html_url}`); + + return { + number: pr.number, + url: pr.html_url + }; +} + +/** + * Clone a GitHub repository to a directory + * @param {string} githubUrl - GitHub repository URL + * @param {string} githubToken - Optional GitHub token for private repos + * @param {string} projectPath - Path for cloning the repository + * @returns {Promise} - Path to the cloned repository + */ +async function cloneGitHubRepo(githubUrl, githubToken = null, 
projectPath) { + return new Promise(async (resolve, reject) => { + try { + // Validate GitHub URL + if (!githubUrl || !githubUrl.includes('github.com')) { + throw new Error('Invalid GitHub URL'); + } + + const cloneDir = path.resolve(projectPath); + + // Check if directory already exists + try { + await fs.access(cloneDir); + // Directory exists - check if it's a git repo with the same URL + try { + const existingUrl = await getGitRemoteUrl(cloneDir); + const normalizedExisting = normalizeGitHubUrl(existingUrl); + const normalizedRequested = normalizeGitHubUrl(githubUrl); + + if (normalizedExisting === normalizedRequested) { + console.log('✅ Repository already exists at path with correct URL'); + return resolve(cloneDir); + } else { + throw new Error(`Directory ${cloneDir} already exists with a different repository (${existingUrl}). Expected: ${githubUrl}`); + } + } catch (gitError) { + throw new Error(`Directory ${cloneDir} already exists but is not a valid git repository or git command failed`); + } + } catch (accessError) { + // Directory doesn't exist - proceed with clone + } + + // Ensure parent directory exists + await fs.mkdir(path.dirname(cloneDir), { recursive: true }); + + // Prepare the git clone URL with authentication if token is provided + let cloneUrl = githubUrl; + if (githubToken) { + // Convert HTTPS URL to authenticated URL + // Example: https://github.com/user/repo -> https://token@github.com/user/repo + cloneUrl = githubUrl.replace('https://github.com', `https://${githubToken}@github.com`); + } + + console.log('🔄 Cloning repository:', githubUrl); + console.log('📁 Destination:', cloneDir); + + // Execute git clone + const gitProcess = spawn('git', ['clone', '--depth', '1', cloneUrl, cloneDir], { + stdio: ['pipe', 'pipe', 'pipe'] + }); + + let stdout = ''; + let stderr = ''; + + gitProcess.stdout.on('data', (data) => { + stdout += data.toString(); + }); + + gitProcess.stderr.on('data', (data) => { + stderr += data.toString(); + console.log('Git 
stderr:', data.toString()); + }); + + gitProcess.on('close', (code) => { + if (code === 0) { + console.log('✅ Repository cloned successfully'); + resolve(cloneDir); + } else { + console.error('❌ Git clone failed:', stderr); + reject(new Error(`Git clone failed: ${stderr}`)); + } + }); + + gitProcess.on('error', (error) => { + reject(new Error(`Failed to execute git: ${error.message}`)); + }); + } catch (error) { + reject(error); + } + }); +} + +/** + * Clean up a temporary project directory and its Claude session + * @param {string} projectPath - Path to the project directory + * @param {string} sessionId - Session ID to clean up + */ +async function cleanupProject(projectPath, sessionId = null) { + try { + // Only clean up projects in the external-projects directory + if (!projectPath.includes('.claude/external-projects')) { + console.warn('⚠️ Refusing to clean up non-external project:', projectPath); + return; + } + + console.log('🧹 Cleaning up project:', projectPath); + await fs.rm(projectPath, { recursive: true, force: true }); + console.log('✅ Project cleaned up'); + + // Also clean up the Claude session directory if sessionId provided + if (sessionId) { + try { + const sessionPath = path.join(os.homedir(), '.claude', 'sessions', sessionId); + console.log('🧹 Cleaning up session directory:', sessionPath); + await fs.rm(sessionPath, { recursive: true, force: true }); + console.log('✅ Session directory cleaned up'); + } catch (error) { + console.error('⚠️ Failed to clean up session directory:', error.message); + } + } + } catch (error) { + console.error('❌ Failed to clean up project:', error); + } +} + +/** + * SSE Stream Writer - Adapts SDK/CLI output to Server-Sent Events + */ +class SSEStreamWriter { + constructor(res, userId = null) { + this.res = res; + this.sessionId = null; + this.userId = userId; + this.isSSEStreamWriter = true; // Marker for transport detection + } + + send(data) { + if (this.res.writableEnded) { + return; + } + + // Format as SSE - 
providers send raw objects, we stringify + this.res.write(`data: ${JSON.stringify(data)}\n\n`); + } + + end() { + if (!this.res.writableEnded) { + this.res.write('data: {"type":"done"}\n\n'); + this.res.end(); + } + } + + setSessionId(sessionId) { + this.sessionId = sessionId; + } + + getSessionId() { + return this.sessionId; + } +} + +/** + * Non-streaming response collector + */ +class ResponseCollector { + constructor(userId = null) { + this.messages = []; + this.sessionId = null; + this.userId = userId; + } + + send(data) { + // Store ALL messages for now - we'll filter when returning + this.messages.push(data); + + // Extract sessionId if present + if (typeof data === 'string') { + try { + const parsed = JSON.parse(data); + if (parsed.sessionId) { + this.sessionId = parsed.sessionId; + } + } catch (e) { + // Not JSON, ignore + } + } else if (data && data.sessionId) { + this.sessionId = data.sessionId; + } + } + + end() { + // Do nothing - we'll collect all messages + } + + setSessionId(sessionId) { + this.sessionId = sessionId; + } + + getSessionId() { + return this.sessionId; + } + + getMessages() { + return this.messages; + } + + /** + * Get filtered assistant messages only + */ + getAssistantMessages() { + const assistantMessages = []; + + for (const msg of this.messages) { + // Skip initial status message + if (msg && msg.type === 'status') { + continue; + } + + // Handle JSON strings + if (typeof msg === 'string') { + try { + const parsed = JSON.parse(msg); + // Only include claude-response messages with assistant type + if (parsed.type === 'claude-response' && parsed.data && parsed.data.type === 'assistant') { + assistantMessages.push(parsed.data); + } + } catch (e) { + // Not JSON, skip + } + } + } + + return assistantMessages; + } + + /** + * Calculate total tokens from all messages + */ + getTotalTokens() { + let totalInput = 0; + let totalOutput = 0; + let totalCacheRead = 0; + let totalCacheCreation = 0; + + for (const msg of this.messages) { + let 
data = msg; + + // Parse if string + if (typeof msg === 'string') { + try { + data = JSON.parse(msg); + } catch (e) { + continue; + } + } + + // Extract usage from claude-response messages + if (data && data.type === 'claude-response' && data.data) { + const msgData = data.data; + if (msgData.message && msgData.message.usage) { + const usage = msgData.message.usage; + totalInput += usage.input_tokens || 0; + totalOutput += usage.output_tokens || 0; + totalCacheRead += usage.cache_read_input_tokens || 0; + totalCacheCreation += usage.cache_creation_input_tokens || 0; + } + } + } + + return { + inputTokens: totalInput, + outputTokens: totalOutput, + cacheReadTokens: totalCacheRead, + cacheCreationTokens: totalCacheCreation, + totalTokens: totalInput + totalOutput + totalCacheRead + totalCacheCreation + }; + } +} + +// =============================== +// External API Endpoint +// =============================== + +/** + * POST /api/agent + * + * Trigger an AI agent (Claude or Cursor) to work on a project. + * Supports automatic GitHub branch and pull request creation after successful completion. + * + * ================================================================================================ + * REQUEST BODY PARAMETERS + * ================================================================================================ + * + * @param {string} githubUrl - (Conditionally Required) GitHub repository URL to clone. + * Supported formats: + * - HTTPS: https://github.com/owner/repo + * - HTTPS with .git: https://github.com/owner/repo.git + * - SSH: git@github.com:owner/repo + * - SSH with .git: git@github.com:owner/repo.git + * + * @param {string} projectPath - (Conditionally Required) Path to existing project OR destination for cloning. 
+ * Behavior depends on usage: + * - If used alone: Must point to existing project directory + * - If used with githubUrl: Target location for cloning + * - If omitted with githubUrl: Auto-generates temporary path in ~/.claude/external-projects/ + * + * @param {string} message - (Required) Task description for the AI agent. Used as: + * - Instructions for the agent + * - Source for auto-generated branch names (if createBranch=true and no branchName) + * - Fallback for PR title if no commits are made + * + * @param {string} provider - (Optional) AI provider to use. Options: 'claude' | 'cursor' | 'codex' | 'gemini' + * Default: 'claude' + * + * @param {boolean} stream - (Optional) Enable Server-Sent Events (SSE) streaming for real-time updates. + * Default: true + * - true: Returns text/event-stream with incremental updates + * - false: Returns complete JSON response after completion + * + * @param {string} model - (Optional) Model identifier for providers. + * + * Claude models: 'sonnet' (default), 'opus', 'haiku', 'opusplan', 'sonnet[1m]' + * Cursor models: 'gpt-5' (default), 'gpt-5.2', 'gpt-5.2-high', 'sonnet-4.5', 'opus-4.5', + * 'gemini-3-pro', 'composer-1', 'auto', 'gpt-5.1', 'gpt-5.1-high', + * 'gpt-5.1-codex', 'gpt-5.1-codex-high', 'gpt-5.1-codex-max', + * 'gpt-5.1-codex-max-high', 'opus-4.1', 'grok', and thinking variants + * Codex models: 'gpt-5.2' (default), 'gpt-5.1-codex-max', 'o3', 'o4-mini' + * + * @param {boolean} cleanup - (Optional) Auto-cleanup project directory after completion. + * Default: true + * Behavior: + * - Only applies when cloning via githubUrl (not for existing projectPath) + * - Deletes cloned repository after 5 seconds + * - Also deletes associated Claude session directory + * - Remote branch and PR remain on GitHub if created + * + * @param {string} githubToken - (Optional) GitHub Personal Access Token for authentication. + * Overrides stored token from user settings. 
+ * Required for: + * - Private repositories + * - Branch/PR creation features + * Token must have 'repo' scope for full functionality. + * + * @param {string} branchName - (Optional) Custom name for the Git branch. + * If provided, createBranch is automatically set to true. + * Validation rules (errors returned if violated): + * - Cannot be empty or whitespace only + * - Cannot start or end with dot (.) + * - Cannot contain consecutive dots (..) + * - Cannot contain spaces + * - Cannot contain special characters: ~ ^ : ? * [ \ + * - Cannot contain @{ + * - Cannot start or end with forward slash (/) + * - Cannot contain consecutive slashes (//) + * - Cannot end with .lock + * - Cannot contain ASCII control characters + * Examples: 'feature/user-auth', 'bugfix/login-error', 'refactor/db-optimization' + * + * @param {boolean} createBranch - (Optional) Create a new Git branch after successful agent completion. + * Default: false (or true if branchName is provided) + * Behavior: + * - Creates branch locally and pushes to remote + * - If branch exists locally: Checks out existing branch (no error) + * - If branch exists on remote: Uses existing branch (no error) + * - Branch name: Custom (if branchName provided) or auto-generated from message + * - Requires either githubUrl OR projectPath with GitHub remote + * + * @param {boolean} createPR - (Optional) Create a GitHub Pull Request after successful completion. 
+ * Default: false + * Behavior: + * - PR title: First commit message (or fallback to message parameter) + * - PR description: Auto-generated from all commit messages + * - Base branch: Always 'main' (currently hardcoded) + * - If PR already exists: GitHub returns error with details + * - Requires either githubUrl OR projectPath with GitHub remote + * + * ================================================================================================ + * PATH HANDLING BEHAVIOR + * ================================================================================================ + * + * Scenario 1: Only githubUrl provided + * Input: { githubUrl: "https://github.com/owner/repo" } + * Action: Clones to auto-generated temporary path: ~/.claude/external-projects// + * Cleanup: Yes (if cleanup=true) + * + * Scenario 2: Only projectPath provided + * Input: { projectPath: "/home/user/my-project" } + * Action: Uses existing project at specified path + * Validation: Path must exist and be accessible + * Cleanup: No (never cleanup existing projects) + * + * Scenario 3: Both githubUrl and projectPath provided + * Input: { githubUrl: "https://github.com/owner/repo", projectPath: "/custom/path" } + * Action: Clones githubUrl to projectPath location + * Validation: + * - If projectPath exists with git repo: + * - Compares remote URL with githubUrl + * - If URLs match: Reuses existing repo + * - If URLs differ: Returns error + * Cleanup: Yes (if cleanup=true) + * + * ================================================================================================ + * GITHUB BRANCH/PR CREATION REQUIREMENTS + * ================================================================================================ + * + * For createBranch or createPR to work, one of the following must be true: + * + * Option A: githubUrl provided + * - Repository URL directly specified + * - Works with both cloning and existing paths + * + * Option B: projectPath with GitHub remote + * - Project must be a Git 
repository + * - Must have 'origin' remote configured + * - Remote URL must point to github.com + * - System auto-detects GitHub URL via: git remote get-url origin + * + * Additional Requirements: + * - Valid GitHub token (from settings or githubToken parameter) + * - Token must have 'repo' scope for private repos + * - Project must have commits (for PR creation) + * + * ================================================================================================ + * VALIDATION & ERROR HANDLING + * ================================================================================================ + * + * Input Validations (400 Bad Request): + * - Either githubUrl OR projectPath must be provided (not neither) + * - message must be non-empty string + * - provider must be 'claude', 'cursor', 'codex', or 'gemini' + * - createBranch/createPR requires githubUrl OR projectPath (not neither) + * - branchName must pass Git naming rules (if provided) + * + * Runtime Validations (500 Internal Server Error or specific error in response): + * - projectPath must exist (if used alone) + * - GitHub URL format must be valid + * - Git remote URL must include github.com (for projectPath + branch/PR) + * - GitHub token must be available (for private repos and branch/PR) + * - Directory conflicts handled (existing path with different repo) + * + * Branch Name Validation Errors (returned in response, not HTTP error): + * Invalid names return: { branch: { error: "Invalid branch name: " } } + * Examples: + * - "my branch" → "Branch name cannot contain spaces" + * - ".feature" → "Branch name cannot start with a dot" + * - "feature.lock" → "Branch name cannot end with .lock" + * + * ================================================================================================ + * RESPONSE FORMATS + * ================================================================================================ + * + * Streaming Response (stream=true): + * Content-Type: text/event-stream + * Events: + * 
- { type: "status", message: "...", projectPath: "..." } + * - { type: "claude-response", data: {...} } + * - { type: "github-branch", branch: { name: "...", url: "..." } } + * - { type: "github-pr", pullRequest: { number: 42, url: "..." } } + * - { type: "github-error", error: "..." } + * - { type: "done" } + * + * Non-Streaming Response (stream=false): + * Content-Type: application/json + * { + * success: true, + * sessionId: "session-123", + * messages: [...], // Assistant messages only (filtered) + * tokens: { + * inputTokens: 150, + * outputTokens: 50, + * cacheReadTokens: 0, + * cacheCreationTokens: 0, + * totalTokens: 200 + * }, + * projectPath: "/path/to/project", + * branch: { // Only if createBranch=true + * name: "feature/xyz", + * url: "https://github.com/owner/repo/tree/feature/xyz" + * } | { error: "..." }, + * pullRequest: { // Only if createPR=true + * number: 42, + * url: "https://github.com/owner/repo/pull/42" + * } | { error: "..." } + * } + * + * Error Response: + * HTTP Status: 400, 401, 500 + * Content-Type: application/json + * { success: false, error: "Error description" } + * + * ================================================================================================ + * EXAMPLES + * ================================================================================================ + * + * Example 1: Clone and process with auto-cleanup + * POST /api/agent + * { "githubUrl": "https://github.com/user/repo", "message": "Fix bug" } + * + * Example 2: Use existing project with custom branch and PR + * POST /api/agent + * { + * "projectPath": "/home/user/project", + * "message": "Add feature", + * "branchName": "feature/new-feature", + * "createPR": true + * } + * + * Example 3: Clone to specific path with auto-generated branch + * POST /api/agent + * { + * "githubUrl": "https://github.com/user/repo", + * "projectPath": "/tmp/work", + * "message": "Refactor code", + * "createBranch": true, + * "cleanup": false + * } + */ +router.post('/', 
validateExternalApiKey, async (req, res) => { + const { githubUrl, projectPath, message, provider = 'claude', model, githubToken, branchName } = req.body; + + // Parse stream and cleanup as booleans (handle string "true"/"false" from curl) + const stream = req.body.stream === undefined ? true : (req.body.stream === true || req.body.stream === 'true'); + const cleanup = req.body.cleanup === undefined ? true : (req.body.cleanup === true || req.body.cleanup === 'true'); + + // If branchName is provided, automatically enable createBranch + const createBranch = branchName ? true : (req.body.createBranch === true || req.body.createBranch === 'true'); + const createPR = req.body.createPR === true || req.body.createPR === 'true'; + + // Validate inputs + if (!githubUrl && !projectPath) { + return res.status(400).json({ error: 'Either githubUrl or projectPath is required' }); + } + + if (!message || !message.trim()) { + return res.status(400).json({ error: 'message is required' }); + } + + if (!['claude', 'cursor', 'codex', 'gemini'].includes(provider)) { + return res.status(400).json({ error: 'provider must be "claude", "cursor", "codex", or "gemini"' }); + } + + // Validate GitHub branch/PR creation requirements + // Allow branch/PR creation with projectPath as long as it has a GitHub remote + if ((createBranch || createPR) && !githubUrl && !projectPath) { + return res.status(400).json({ error: 'createBranch and createPR require either githubUrl or projectPath with a GitHub remote' }); + } + + let finalProjectPath = null; + let writer = null; + + try { + // Determine the final project path + if (githubUrl) { + // Clone repository (to projectPath if provided, otherwise generate path) + // TODO: use credinitalsDB when refactoring + const tokenToUse = githubToken || githubTokensDb.getActiveGithubToken(req.user.id); + + let targetPath; + if (projectPath) { + targetPath = projectPath; + } else { + // Generate a unique path for cloning + const repoHash = 
crypto.createHash('md5').update(githubUrl + Date.now()).digest('hex'); + targetPath = path.join(os.homedir(), '.claude', 'external-projects', repoHash); + } + + finalProjectPath = await cloneGitHubRepo(githubUrl.trim(), tokenToUse, targetPath); + } else { + // Use existing project path + finalProjectPath = path.resolve(projectPath); + + // Verify the path exists + try { + await fs.access(finalProjectPath); + } catch (error) { + throw new Error(`Project path does not exist: ${finalProjectPath}`); + } + } + + // Register the project (or use existing registration) + let project; + try { + project = await addProjectManually(finalProjectPath); + console.log('📦 Project registered:', project); + } catch (error) { + // If project already exists, that's fine - continue with the existing registration + if (error.message && error.message.includes('Project already configured')) { + console.log('📦 Using existing project registration for:', finalProjectPath); + project = { path: finalProjectPath }; + } else { + throw error; + } + } + + // Set up writer based on streaming mode + if (stream) { + // Set up SSE headers for streaming + res.setHeader('Content-Type', 'text/event-stream'); + res.setHeader('Cache-Control', 'no-cache'); + res.setHeader('Connection', 'keep-alive'); + res.setHeader('X-Accel-Buffering', 'no'); // Disable nginx buffering + + writer = new SSEStreamWriter(res, req.user.id); + + // Send initial status + writer.send({ + type: 'status', + message: githubUrl ? 'Repository cloned and session started' : 'Session started', + projectPath: finalProjectPath + }); + } else { + // Non-streaming mode: collect messages + writer = new ResponseCollector(req.user.id); + + // Collect initial status message + writer.send({ + type: 'status', + message: githubUrl ? 
'Repository cloned and session started' : 'Session started', + projectPath: finalProjectPath + }); + } + + // Start the appropriate session + if (provider === 'claude') { + console.log('🤖 Starting Claude SDK session'); + + await queryClaudeSDK(message.trim(), { + projectPath: finalProjectPath, + cwd: finalProjectPath, + sessionId: null, // New session + model: model, + permissionMode: 'bypassPermissions' // Bypass all permissions for API calls + }, writer); + + } else if (provider === 'cursor') { + console.log('🖱️ Starting Cursor CLI session'); + + await spawnCursor(message.trim(), { + projectPath: finalProjectPath, + cwd: finalProjectPath, + sessionId: null, // New session + model: model || undefined, + skipPermissions: true // Bypass permissions for Cursor + }, writer); + } else if (provider === 'codex') { + console.log('🤖 Starting Codex SDK session'); + + await queryCodex(message.trim(), { + projectPath: finalProjectPath, + cwd: finalProjectPath, + sessionId: null, + model: model || CODEX_MODELS.DEFAULT, + permissionMode: 'bypassPermissions' + }, writer); + } else if (provider === 'gemini') { + console.log('✨ Starting Gemini CLI session'); + + await spawnGemini(message.trim(), { + projectPath: finalProjectPath, + cwd: finalProjectPath, + sessionId: null, + model: model, + skipPermissions: true // CLI mode bypasses permissions + }, writer); + } + + // Handle GitHub branch and PR creation after successful agent completion + let branchInfo = null; + let prInfo = null; + + if (createBranch || createPR) { + try { + console.log('🔄 Starting GitHub branch/PR creation workflow...'); + + // Get GitHub token + // TODO: use credentialsDb when refactoring + const tokenToUse = githubToken || githubTokensDb.getActiveGithubToken(req.user.id); + + if (!tokenToUse) { + throw new Error('GitHub token required for branch/PR creation. 
Please configure a GitHub token in settings.'); + } + + // Initialize Octokit + const octokit = new Octokit({ auth: tokenToUse }); + + // Get GitHub URL - either from parameter or from git remote + let repoUrl = githubUrl; + if (!repoUrl) { + console.log('🔍 Getting GitHub URL from git remote...'); + try { + repoUrl = await getGitRemoteUrl(finalProjectPath); + if (!repoUrl.includes('github.com')) { + throw new Error('Project does not have a GitHub remote configured'); + } + console.log(`✅ Found GitHub remote: ${repoUrl}`); + } catch (error) { + throw new Error(`Failed to get GitHub remote URL: ${error.message}`); + } + } + + // Parse GitHub URL to get owner and repo + const { owner, repo } = parseGitHubUrl(repoUrl); + console.log(`📦 Repository: ${owner}/${repo}`); + + // Use provided branch name or auto-generate from message + const finalBranchName = branchName || autogenerateBranchName(message); + if (branchName) { + console.log(`🌿 Using provided branch name: ${finalBranchName}`); + + // Validate custom branch name + const validation = validateBranchName(finalBranchName); + if (!validation.valid) { + throw new Error(`Invalid branch name: ${validation.error}`); + } + } else { + console.log(`🌿 Auto-generated branch name: ${finalBranchName}`); + } + + if (createBranch) { + // Create and checkout the new branch locally + console.log('🔄 Creating local branch...'); + const checkoutProcess = spawn('git', ['checkout', '-b', finalBranchName], { + cwd: finalProjectPath, + stdio: 'pipe' + }); + + await new Promise((resolve, reject) => { + let stderr = ''; + checkoutProcess.stderr.on('data', (data) => { stderr += data.toString(); }); + checkoutProcess.on('close', (code) => { + if (code === 0) { + console.log(`✅ Created and checked out local branch '${finalBranchName}'`); + resolve(); + } else { + // Branch might already exist locally, try to checkout + if (stderr.includes('already exists')) { + console.log(`ℹ️ Branch '${finalBranchName}' already exists locally, checking 
out...`); + const checkoutExisting = spawn('git', ['checkout', finalBranchName], { + cwd: finalProjectPath, + stdio: 'pipe' + }); + checkoutExisting.on('close', (checkoutCode) => { + if (checkoutCode === 0) { + console.log(`✅ Checked out existing branch '${finalBranchName}'`); + resolve(); + } else { + reject(new Error(`Failed to checkout existing branch: ${stderr}`)); + } + }); + } else { + reject(new Error(`Failed to create branch: ${stderr}`)); + } + } + }); + }); + + // Push the branch to remote + console.log('🔄 Pushing branch to remote...'); + const pushProcess = spawn('git', ['push', '-u', 'origin', finalBranchName], { + cwd: finalProjectPath, + stdio: 'pipe' + }); + + await new Promise((resolve, reject) => { + let stderr = ''; + let stdout = ''; + pushProcess.stdout.on('data', (data) => { stdout += data.toString(); }); + pushProcess.stderr.on('data', (data) => { stderr += data.toString(); }); + pushProcess.on('close', (code) => { + if (code === 0) { + console.log(`✅ Pushed branch '${finalBranchName}' to remote`); + resolve(); + } else { + // Check if branch exists on remote but has different commits + if (stderr.includes('already exists') || stderr.includes('up-to-date')) { + console.log(`ℹ️ Branch '${finalBranchName}' already exists on remote, using existing branch`); + resolve(); + } else { + reject(new Error(`Failed to push branch: ${stderr}`)); + } + } + }); + }); + + branchInfo = { + name: finalBranchName, + url: `https://github.com/${owner}/${repo}/tree/${finalBranchName}` + }; + } + + if (createPR) { + // Get commit messages to generate PR description + console.log('🔄 Generating PR title and description...'); + const commitMessages = await getCommitMessages(finalProjectPath, 5); + + // Use the first commit message as the PR title, or fallback to the agent message + const prTitle = commitMessages.length > 0 ? 
commitMessages[0] : message; + + // Generate PR body from commit messages + let prBody = '## Changes\n\n'; + if (commitMessages.length > 0) { + prBody += commitMessages.map(msg => `- ${msg}`).join('\n'); + } else { + prBody += `Agent task: ${message}`; + } + prBody += '\n\n---\n*This pull request was automatically created by Claude Code UI Agent.*'; + + console.log(`📝 PR Title: ${prTitle}`); + + // Create the pull request + console.log('🔄 Creating pull request...'); + prInfo = await createGitHubPR(octokit, owner, repo, finalBranchName, prTitle, prBody, 'main'); + } + + // Send branch/PR info in response + if (stream) { + if (branchInfo) { + writer.send({ + type: 'github-branch', + branch: branchInfo + }); + } + if (prInfo) { + writer.send({ + type: 'github-pr', + pullRequest: prInfo + }); + } + } + + } catch (error) { + console.error('❌ GitHub branch/PR creation error:', error); + + // Send error but don't fail the entire request + if (stream) { + writer.send({ + type: 'github-error', + error: error.message + }); + } + // Store error info for non-streaming response + if (!stream) { + branchInfo = { error: error.message }; + prInfo = { error: error.message }; + } + } + } + + // Handle response based on streaming mode + if (stream) { + // Streaming mode: end the SSE stream + writer.end(); + } else { + // Non-streaming mode: send filtered messages and token summary as JSON + const assistantMessages = writer.getAssistantMessages(); + const tokenSummary = writer.getTotalTokens(); + + const response = { + success: true, + sessionId: writer.getSessionId(), + messages: assistantMessages, + tokens: tokenSummary, + projectPath: finalProjectPath + }; + + // Add branch/PR info if created + if (branchInfo) { + response.branch = branchInfo; + } + if (prInfo) { + response.pullRequest = prInfo; + } + + res.json(response); + } + + // Clean up if requested + if (cleanup && githubUrl) { + // Only cleanup if we cloned a repo (not for existing project paths) + const sessionIdForCleanup 
= writer.getSessionId(); + setTimeout(() => { + cleanupProject(finalProjectPath, sessionIdForCleanup); + }, 5000); + } + + } catch (error) { + console.error('❌ External session error:', error); + + // Clean up on error + if (finalProjectPath && cleanup && githubUrl) { + const sessionIdForCleanup = writer ? writer.getSessionId() : null; + cleanupProject(finalProjectPath, sessionIdForCleanup); + } + + if (stream) { + // For streaming, send error event and stop + if (!writer) { + // Set up SSE headers if not already done + res.setHeader('Content-Type', 'text/event-stream'); + res.setHeader('Cache-Control', 'no-cache'); + res.setHeader('Connection', 'keep-alive'); + res.setHeader('X-Accel-Buffering', 'no'); + writer = new SSEStreamWriter(res, req.user.id); + } + + if (!res.writableEnded) { + writer.send({ + type: 'error', + error: error.message, + message: `Failed: ${error.message}` + }); + writer.end(); + } + } else if (!res.headersSent) { + res.status(500).json({ + success: false, + error: error.message + }); + } + } +}); + +export default router; diff --git a/server/src/modules/api-keys/api-keys.routes.js b/server/src/modules/api-keys/api-keys.routes.js new file mode 100644 index 00000000..f1e140d2 --- /dev/null +++ b/server/src/modules/api-keys/api-keys.routes.js @@ -0,0 +1,86 @@ +import express from 'express'; +import { apiKeysDb } from '../../../database/db.js'; + +const router = express.Router(); + +// =============================== +// API Keys Management +// =============================== + +// Get all API keys for the authenticated user +router.get('/', async (req, res) => { + try { + const apiKeys = apiKeysDb.getApiKeys(req.user.id); + // Don't send the full API key in the list for security + const sanitizedKeys = apiKeys.map(key => ({ + ...key, + api_key: key.api_key.substring(0, 10) + '...' 
+ })); + res.json({ apiKeys: sanitizedKeys }); + } catch (error) { + console.error('Error fetching API keys:', error); + res.status(500).json({ error: 'Failed to fetch API keys' }); + } +}); + +// Create a new API key +router.post('/', async (req, res) => { + try { + const { keyName } = req.body; + + if (!keyName || !keyName.trim()) { + return res.status(400).json({ error: 'Key name is required' }); + } + + const result = apiKeysDb.createApiKey(req.user.id, keyName.trim()); + res.json({ + success: true, + apiKey: result + }); + } catch (error) { + console.error('Error creating API key:', error); + res.status(500).json({ error: 'Failed to create API key' }); + } +}); + +// Delete an API key +router.delete('/:keyId', async (req, res) => { + try { + const { keyId } = req.params; + const success = apiKeysDb.deleteApiKey(req.user.id, parseInt(keyId)); + + if (success) { + res.json({ success: true }); + } else { + res.status(404).json({ error: 'API key not found' }); + } + } catch (error) { + console.error('Error deleting API key:', error); + res.status(500).json({ error: 'Failed to delete API key' }); + } +}); + +// Toggle API key active status +router.patch('/:keyId/toggle', async (req, res) => { + try { + const { keyId } = req.params; + const { isActive } = req.body; + + if (typeof isActive !== 'boolean') { + return res.status(400).json({ error: 'isActive must be a boolean' }); + } + + const success = apiKeysDb.toggleApiKey(req.user.id, parseInt(keyId), isActive); + + if (success) { + res.json({ success: true }); + } else { + res.status(404).json({ error: 'API key not found' }); + } + } catch (error) { + console.error('Error toggling API key:', error); + res.status(500).json({ error: 'Failed to toggle API key' }); + } +}); + +export default router; diff --git a/server/src/modules/cli-auth/cli-auth.routes.js b/server/src/modules/cli-auth/cli-auth.routes.js new file mode 100644 index 00000000..78ffa30b --- /dev/null +++ b/server/src/modules/cli-auth/cli-auth.routes.js 
@@ -0,0 +1,434 @@ +import express from 'express'; +import { spawn } from 'child_process'; +import fs from 'fs/promises'; +import path from 'path'; +import os from 'os'; + +const router = express.Router(); + +router.get('/claude/status', async (req, res) => { + try { + const credentialsResult = await checkClaudeCredentials(); + + if (credentialsResult.authenticated) { + return res.json({ + authenticated: true, + email: credentialsResult.email || 'Authenticated', + method: credentialsResult.method // 'api_key' or 'credentials_file' + }); + } + + return res.json({ + authenticated: false, + email: null, + method: null, + error: credentialsResult.error || 'Not authenticated' + }); + + } catch (error) { + console.error('Error checking Claude auth status:', error); + res.status(500).json({ + authenticated: false, + email: null, + method: null, + error: error.message + }); + } +}); + +router.get('/cursor/status', async (req, res) => { + try { + const result = await checkCursorStatus(); + + res.json({ + authenticated: result.authenticated, + email: result.email, + error: result.error + }); + + } catch (error) { + console.error('Error checking Cursor auth status:', error); + res.status(500).json({ + authenticated: false, + email: null, + error: error.message + }); + } +}); + +router.get('/codex/status', async (req, res) => { + try { + const result = await checkCodexCredentials(); + + res.json({ + authenticated: result.authenticated, + email: result.email, + error: result.error + }); + + } catch (error) { + console.error('Error checking Codex auth status:', error); + res.status(500).json({ + authenticated: false, + email: null, + error: error.message + }); + } +}); + +router.get('/gemini/status', async (req, res) => { + try { + const result = await checkGeminiCredentials(); + + res.json({ + authenticated: result.authenticated, + email: result.email, + error: result.error + }); + + } catch (error) { + console.error('Error checking Gemini auth status:', error); + 
res.status(500).json({ + authenticated: false, + email: null, + error: error.message + }); + } +}); + +async function loadClaudeSettingsEnv() { + try { + const settingsPath = path.join(os.homedir(), '.claude', 'settings.json'); + const content = await fs.readFile(settingsPath, 'utf8'); + const settings = JSON.parse(content); + + if (settings?.env && typeof settings.env === 'object') { + return settings.env; + } + } catch (error) { + // Ignore missing or malformed settings and fall back to other auth sources. + } + + return {}; +} + +/** + * Checks Claude authentication credentials using two methods with priority order: + * + * Priority 1: ANTHROPIC_API_KEY environment variable + * Priority 1b: ~/.claude/settings.json env values + * Priority 2: ~/.claude/.credentials.json OAuth tokens + * + * The Claude Agent SDK prioritizes environment variables over authenticated subscriptions. + * This matching behavior ensures consistency with how the SDK authenticates. + * + * References: + * - https://support.claude.com/en/articles/12304248-managing-api-key-environment-variables-in-claude-code + * "Claude Code prioritizes environment variable API keys over authenticated subscriptions" + * - https://platform.claude.com/docs/en/agent-sdk/overview + * SDK authentication documentation + * + * @returns {Promise} Authentication status with { authenticated, email, method } + * - authenticated: boolean indicating if valid credentials exist + * - email: user email or auth method identifier + * - method: 'api_key' for env var, 'credentials_file' for OAuth tokens + */ +async function checkClaudeCredentials() { + // Priority 1: Check for ANTHROPIC_API_KEY environment variable + // The SDK checks this first and uses it if present, even if OAuth tokens exist. + // When set, API calls are charged via pay-as-you-go rates instead of subscription. 
+ if (process.env.ANTHROPIC_API_KEY && process.env.ANTHROPIC_API_KEY.trim()) { + return { + authenticated: true, + email: 'API Key Auth', + method: 'api_key' + }; + } + + // Priority 1b: Check ~/.claude/settings.json env values. + // Claude Code can read proxy/auth values from settings.json even when the + // CloudCLI server process itself was not started with those env vars exported. + const settingsEnv = await loadClaudeSettingsEnv(); + + if (typeof settingsEnv.ANTHROPIC_API_KEY === 'string' && settingsEnv.ANTHROPIC_API_KEY.trim()) { + return { + authenticated: true, + email: 'API Key Auth', + method: 'api_key' + }; + } + + if (typeof settingsEnv.ANTHROPIC_AUTH_TOKEN === 'string' && settingsEnv.ANTHROPIC_AUTH_TOKEN.trim()) { + return { + authenticated: true, + email: 'Configured via settings.json', + method: 'api_key' + }; + } + + // Priority 2: Check ~/.claude/.credentials.json for OAuth tokens + // This is the standard authentication method used by Claude CLI after running + // 'claude /login' or 'claude setup-token' commands. 
+ try { + const credPath = path.join(os.homedir(), '.claude', '.credentials.json'); + const content = await fs.readFile(credPath, 'utf8'); + const creds = JSON.parse(content); + + const oauth = creds.claudeAiOauth; + if (oauth && oauth.accessToken) { + const isExpired = oauth.expiresAt && Date.now() >= oauth.expiresAt; + + if (!isExpired) { + return { + authenticated: true, + email: creds.email || creds.user || null, + method: 'credentials_file' + }; + } + } + + return { + authenticated: false, + email: null, + method: null + }; + } catch (error) { + return { + authenticated: false, + email: null, + method: null + }; + } +} + +function checkCursorStatus() { + return new Promise((resolve) => { + let processCompleted = false; + + const timeout = setTimeout(() => { + if (!processCompleted) { + processCompleted = true; + if (childProcess) { + childProcess.kill(); + } + resolve({ + authenticated: false, + email: null, + error: 'Command timeout' + }); + } + }, 5000); + + let childProcess; + try { + childProcess = spawn('cursor-agent', ['status']); + } catch (err) { + clearTimeout(timeout); + processCompleted = true; + resolve({ + authenticated: false, + email: null, + error: 'Cursor CLI not found or not installed' + }); + return; + } + + let stdout = ''; + let stderr = ''; + + childProcess.stdout.on('data', (data) => { + stdout += data.toString(); + }); + + childProcess.stderr.on('data', (data) => { + stderr += data.toString(); + }); + + childProcess.on('close', (code) => { + if (processCompleted) return; + processCompleted = true; + clearTimeout(timeout); + + if (code === 0) { + const emailMatch = stdout.match(/Logged in as ([a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,})/i); + + if (emailMatch) { + resolve({ + authenticated: true, + email: emailMatch[1], + output: stdout + }); + } else if (stdout.includes('Logged in')) { + resolve({ + authenticated: true, + email: 'Logged in', + output: stdout + }); + } else { + resolve({ + authenticated: false, + email: null, + error: 
'Not logged in' + }); + } + } else { + resolve({ + authenticated: false, + email: null, + error: stderr || 'Not logged in' + }); + } + }); + + childProcess.on('error', (err) => { + if (processCompleted) return; + processCompleted = true; + clearTimeout(timeout); + + resolve({ + authenticated: false, + email: null, + error: 'Cursor CLI not found or not installed' + }); + }); + }); +} + +async function checkCodexCredentials() { + try { + const authPath = path.join(os.homedir(), '.codex', 'auth.json'); + const content = await fs.readFile(authPath, 'utf8'); + const auth = JSON.parse(content); + + // Tokens are nested under 'tokens' key + const tokens = auth.tokens || {}; + + // Check for valid tokens (id_token or access_token) + if (tokens.id_token || tokens.access_token) { + // Try to extract email from id_token JWT payload + let email = 'Authenticated'; + if (tokens.id_token) { + try { + // JWT is base64url encoded: header.payload.signature + const parts = tokens.id_token.split('.'); + if (parts.length >= 2) { + // Decode the payload (second part) + const payload = JSON.parse(Buffer.from(parts[1], 'base64url').toString('utf8')); + email = payload.email || payload.user || 'Authenticated'; + } + } catch { + // If JWT decoding fails, use fallback + email = 'Authenticated'; + } + } + + return { + authenticated: true, + email + }; + } + + // Also check for OPENAI_API_KEY as fallback auth method + if (auth.OPENAI_API_KEY) { + return { + authenticated: true, + email: 'API Key Auth' + }; + } + + return { + authenticated: false, + email: null, + error: 'No valid tokens found' + }; + } catch (error) { + if (error.code === 'ENOENT') { + return { + authenticated: false, + email: null, + error: 'Codex not configured' + }; + } + return { + authenticated: false, + email: null, + error: error.message + }; + } +} + +async function checkGeminiCredentials() { + if (process.env.GEMINI_API_KEY && process.env.GEMINI_API_KEY.trim()) { + return { + authenticated: true, + email: 'API Key 
Auth' + }; + } + + try { + const credsPath = path.join(os.homedir(), '.gemini', 'oauth_creds.json'); + const content = await fs.readFile(credsPath, 'utf8'); + const creds = JSON.parse(content); + + if (creds.access_token) { + let email = 'OAuth Session'; + + try { + // Validate token against Google API + const tokenRes = await fetch(`https://oauth2.googleapis.com/tokeninfo?access_token=${creds.access_token}`); + if (tokenRes.ok) { + const tokenInfo = await tokenRes.json(); + if (tokenInfo.email) { + email = tokenInfo.email; + } + } else if (!creds.refresh_token) { + // Token invalid and no refresh token available + return { + authenticated: false, + email: null, + error: 'Access token invalid and no refresh token found' + }; + } else { + // Token might be expired but we have a refresh token, so CLI will refresh it + try { + const accPath = path.join(os.homedir(), '.gemini', 'google_accounts.json'); + const accContent = await fs.readFile(accPath, 'utf8'); + const accounts = JSON.parse(accContent); + if (accounts.active) { + email = accounts.active; + } + } catch (e) { } + } + } catch (e) { + // Network error, fallback to checking local accounts file + try { + const accPath = path.join(os.homedir(), '.gemini', 'google_accounts.json'); + const accContent = await fs.readFile(accPath, 'utf8'); + const accounts = JSON.parse(accContent); + if (accounts.active) { + email = accounts.active; + } + } catch (err) { } + } + + return { + authenticated: true, + email: email + }; + } + + return { + authenticated: false, + email: null, + error: 'No valid tokens found in oauth_creds' + }; + } catch (error) { + return { + authenticated: false, + email: null, + error: 'Gemini CLI not configured' + }; + } +} + +export default router; diff --git a/server/src/modules/codex/codex.routes.js b/server/src/modules/codex/codex.routes.js new file mode 100644 index 00000000..e23bdd0b --- /dev/null +++ b/server/src/modules/codex/codex.routes.js @@ -0,0 +1,329 @@ +import express from 'express'; 
+import { spawn } from 'child_process'; +import { promises as fs } from 'fs'; +import path from 'path'; +import os from 'os'; +import TOML from '@iarna/toml'; +import { getCodexSessions, deleteCodexSession } from '../../../projects.js'; +import { applyCustomSessionNames, sessionNamesDb } from '../../../database/db.js'; + +const router = express.Router(); + +function createCliResponder(res) { + let responded = false; + return (status, payload) => { + if (responded || res.headersSent) { + return; + } + responded = true; + res.status(status).json(payload); + }; +} + +router.get('/config', async (req, res) => { + try { + const configPath = path.join(os.homedir(), '.codex', 'config.toml'); + const content = await fs.readFile(configPath, 'utf8'); + const config = TOML.parse(content); + + res.json({ + success: true, + config: { + model: config.model || null, + mcpServers: config.mcp_servers || {}, + approvalMode: config.approval_mode || 'suggest' + } + }); + } catch (error) { + if (error.code === 'ENOENT') { + res.json({ + success: true, + config: { + model: null, + mcpServers: {}, + approvalMode: 'suggest' + } + }); + } else { + console.error('Error reading Codex config:', error); + res.status(500).json({ success: false, error: error.message }); + } + } +}); + +router.get('/sessions', async (req, res) => { + try { + const { projectPath } = req.query; + + if (!projectPath) { + return res.status(400).json({ success: false, error: 'projectPath query parameter required' }); + } + + const sessions = await getCodexSessions(projectPath); + applyCustomSessionNames(sessions, 'codex'); + res.json({ success: true, sessions }); + } catch (error) { + console.error('Error fetching Codex sessions:', error); + res.status(500).json({ success: false, error: error.message }); + } +}); + +router.delete('/sessions/:sessionId', async (req, res) => { + try { + const { sessionId } = req.params; + await deleteCodexSession(sessionId); + sessionNamesDb.deleteName(sessionId, 'codex'); + res.json({ 
success: true }); + } catch (error) { + console.error(`Error deleting Codex session ${req.params.sessionId}:`, error); + res.status(500).json({ success: false, error: error.message }); + } +}); + +// MCP Server Management Routes + +router.get('/mcp/cli/list', async (req, res) => { + try { + const respond = createCliResponder(res); + const proc = spawn('codex', ['mcp', 'list'], { stdio: ['pipe', 'pipe', 'pipe'] }); + + let stdout = ''; + let stderr = ''; + + proc.stdout?.on('data', (data) => { stdout += data.toString(); }); + proc.stderr?.on('data', (data) => { stderr += data.toString(); }); + + proc.on('close', (code) => { + if (code === 0) { + respond(200, { success: true, output: stdout, servers: parseCodexListOutput(stdout) }); + } else { + respond(500, { error: 'Codex CLI command failed', details: stderr || `Exited with code ${code}` }); + } + }); + + proc.on('error', (error) => { + const isMissing = error?.code === 'ENOENT'; + respond(isMissing ? 503 : 500, { + error: isMissing ? 'Codex CLI not installed' : 'Failed to run Codex CLI', + details: error.message, + code: error.code + }); + }); + } catch (error) { + res.status(500).json({ error: 'Failed to list MCP servers', details: error.message }); + } +}); + +router.post('/mcp/cli/add', async (req, res) => { + try { + const { name, command, args = [], env = {} } = req.body; + + if (!name || !command) { + return res.status(400).json({ error: 'name and command are required' }); + } + + // Build: codex mcp add [-e KEY=VAL]... -- [args...] 
+ let cliArgs = ['mcp', 'add', name]; + + Object.entries(env).forEach(([key, value]) => { + cliArgs.push('-e', `${key}=${value}`); + }); + + cliArgs.push('--', command); + + if (args && args.length > 0) { + cliArgs.push(...args); + } + + const respond = createCliResponder(res); + const proc = spawn('codex', cliArgs, { stdio: ['pipe', 'pipe', 'pipe'] }); + + let stdout = ''; + let stderr = ''; + + proc.stdout?.on('data', (data) => { stdout += data.toString(); }); + proc.stderr?.on('data', (data) => { stderr += data.toString(); }); + + proc.on('close', (code) => { + if (code === 0) { + respond(200, { success: true, output: stdout, message: `MCP server "${name}" added successfully` }); + } else { + respond(400, { error: 'Codex CLI command failed', details: stderr || `Exited with code ${code}` }); + } + }); + + proc.on('error', (error) => { + const isMissing = error?.code === 'ENOENT'; + respond(isMissing ? 503 : 500, { + error: isMissing ? 'Codex CLI not installed' : 'Failed to run Codex CLI', + details: error.message, + code: error.code + }); + }); + } catch (error) { + res.status(500).json({ error: 'Failed to add MCP server', details: error.message }); + } +}); + +router.delete('/mcp/cli/remove/:name', async (req, res) => { + try { + const { name } = req.params; + + const respond = createCliResponder(res); + const proc = spawn('codex', ['mcp', 'remove', name], { stdio: ['pipe', 'pipe', 'pipe'] }); + + let stdout = ''; + let stderr = ''; + + proc.stdout?.on('data', (data) => { stdout += data.toString(); }); + proc.stderr?.on('data', (data) => { stderr += data.toString(); }); + + proc.on('close', (code) => { + if (code === 0) { + respond(200, { success: true, output: stdout, message: `MCP server "${name}" removed successfully` }); + } else { + respond(400, { error: 'Codex CLI command failed', details: stderr || `Exited with code ${code}` }); + } + }); + + proc.on('error', (error) => { + const isMissing = error?.code === 'ENOENT'; + respond(isMissing ? 
503 : 500, { + error: isMissing ? 'Codex CLI not installed' : 'Failed to run Codex CLI', + details: error.message, + code: error.code + }); + }); + } catch (error) { + res.status(500).json({ error: 'Failed to remove MCP server', details: error.message }); + } +}); + +router.get('/mcp/cli/get/:name', async (req, res) => { + try { + const { name } = req.params; + + const respond = createCliResponder(res); + const proc = spawn('codex', ['mcp', 'get', name], { stdio: ['pipe', 'pipe', 'pipe'] }); + + let stdout = ''; + let stderr = ''; + + proc.stdout?.on('data', (data) => { stdout += data.toString(); }); + proc.stderr?.on('data', (data) => { stderr += data.toString(); }); + + proc.on('close', (code) => { + if (code === 0) { + respond(200, { success: true, output: stdout, server: parseCodexGetOutput(stdout) }); + } else { + respond(404, { error: 'Codex CLI command failed', details: stderr || `Exited with code ${code}` }); + } + }); + + proc.on('error', (error) => { + const isMissing = error?.code === 'ENOENT'; + respond(isMissing ? 503 : 500, { + error: isMissing ? 
'Codex CLI not installed' : 'Failed to run Codex CLI', + details: error.message, + code: error.code + }); + }); + } catch (error) { + res.status(500).json({ error: 'Failed to get MCP server details', details: error.message }); + } +}); + +router.get('/mcp/config/read', async (req, res) => { + try { + const configPath = path.join(os.homedir(), '.codex', 'config.toml'); + + let configData = null; + + try { + const fileContent = await fs.readFile(configPath, 'utf8'); + configData = TOML.parse(fileContent); + } catch (error) { + // Config file doesn't exist + } + + if (!configData) { + return res.json({ success: true, configPath, servers: [] }); } + + const servers = []; + + if (configData.mcp_servers && typeof configData.mcp_servers === 'object') { + for (const [name, config] of Object.entries(configData.mcp_servers)) { + servers.push({ + id: name, + name: name, + type: 'stdio', + scope: 'user', + config: { + command: config.command || '', + args: config.args || [], + env: config.env || {} + }, + raw: config + }); + } + } + + res.json({ success: true, configPath, servers }); + } catch (error) { + res.status(500).json({ error: 'Failed to read Codex configuration', details: error.message }); + } +}); + +function parseCodexListOutput(output) { + const servers = []; + const lines = output.split('\n').filter(line => line.trim()); + + for (const line of lines) { + if (line.includes(':')) { + const colonIndex = line.indexOf(':'); + const name = line.substring(0, colonIndex).trim(); + + if (!name) continue; + + const rest = line.substring(colonIndex + 1).trim(); + let description = rest; + let status = 'unknown'; + + if (rest.includes('✓') || rest.includes('✗')) { + const statusMatch = rest.match(/(.*?)\s*-\s*([✓✗].*)$/); + if (statusMatch) { + description = statusMatch[1].trim(); + status = statusMatch[2].includes('✓') ? 
/**
 * Parse the output of `codex mcp get <name>` into a server object.
 *
 * Strategy: if the output embeds a JSON object, parse and return it directly.
 * Otherwise fall back to scanning `Label: value` lines for Name/Type/Command,
 * always preserving the raw output. Any parse failure is reported via
 * `parse_error` rather than thrown.
 *
 * @param {string} output - Raw stdout from the Codex CLI.
 * @returns {object} Parsed server description, or `{ raw_output, parse_error }`.
 */
function parseCodexGetOutput(output) {
  try {
    const embedded = output.match(/\{[\s\S]*\}/);
    if (embedded) {
      return JSON.parse(embedded[0]);
    }

    const details = { raw_output: output };
    // First matching label wins per line, mirroring an if/else-if chain.
    const fields = [
      ['Name:', 'name'],
      ['Type:', 'type'],
      ['Command:', 'command']
    ];
    for (const line of output.split('\n')) {
      for (const [label, key] of fields) {
        if (line.includes(label)) {
          details[key] = line.split(':')[1]?.trim();
          break;
        }
      }
    }
    return details;
  } catch (error) {
    return { raw_output: output, parse_error: error.message };
  }
}
/**
 * Recursively collect command definition files (*.md) under a directory.
 *
 * Subdirectories are descended into; each markdown file contributes one
 * command whose name is its path relative to `baseDir` (slash-normalized,
 * `.md` stripped, prefixed with `/`). The description comes from the
 * frontmatter, falling back to the first content line with any leading
 * `#` heading markers removed. A missing or unreadable directory yields
 * an empty list; other scan errors are logged and swallowed.
 *
 * @param {string} dir - Directory to scan.
 * @param {string} baseDir - Base directory for relative command names.
 * @param {string} namespace - Namespace tag (e.g. 'project', 'user').
 * @returns {Promise<Array>} Array of command objects.
 */
async function scanCommandsDirectory(dir, baseDir, namespace) {
  const commands = [];

  try {
    await fs.access(dir);
    const entries = await fs.readdir(dir, { withFileTypes: true });

    for (const entry of entries) {
      const entryPath = path.join(dir, entry.name);

      if (entry.isDirectory()) {
        commands.push(...(await scanCommandsDirectory(entryPath, baseDir, namespace)));
        continue;
      }
      if (!entry.isFile() || !entry.name.endsWith('.md')) {
        continue;
      }

      try {
        const fileText = await fs.readFile(entryPath, 'utf8');
        const { data: frontmatter, content: body } = parseFrontmatter(fileText);

        const relativePath = path.relative(baseDir, entryPath);
        const name = '/' + relativePath.replace(/\.md$/, '').replace(/\\/g, '/');

        let description = frontmatter.description || '';
        if (!description) {
          // Fall back to the file's first line, minus markdown heading markers.
          description = body.trim().split('\n')[0].replace(/^#+\s*/, '').trim();
        }

        commands.push({
          name,
          path: entryPath,
          relativePath,
          description,
          namespace,
          metadata: frontmatter
        });
      } catch (err) {
        console.error(`Error parsing command file ${entryPath}:`, err.message);
      }
    }
  } catch (err) {
    // A missing/inaccessible directory is expected and not worth logging.
    if (err.code !== 'ENOENT' && err.code !== 'EACCES') {
      console.error(`Error scanning directory ${dir}:`, err.message);
    }
  }

  return commands;
}
'/cost', + description: 'Display token usage and cost information', + namespace: 'builtin', + metadata: { type: 'builtin' } + }, + { + name: '/memory', + description: 'Open CLAUDE.md memory file for editing', + namespace: 'builtin', + metadata: { type: 'builtin' } + }, + { + name: '/config', + description: 'Open settings and configuration', + namespace: 'builtin', + metadata: { type: 'builtin' } + }, + { + name: '/status', + description: 'Show system status and version information', + namespace: 'builtin', + metadata: { type: 'builtin' } + }, + { + name: '/rewind', + description: 'Rewind the conversation to a previous state', + namespace: 'builtin', + metadata: { type: 'builtin' } + } +]; + +/** + * Built-in command handlers + * Each handler returns { type: 'builtin', action: string, data: any } + */ +const builtInHandlers = { + '/help': async (args, context) => { + const helpText = `# Claude Code Commands + +## Built-in Commands + +${builtInCommands.map(cmd => `### ${cmd.name} +${cmd.description} +`).join('\n')} + +## Custom Commands + +Custom commands can be created in: +- Project: \`.claude/commands/\` (project-specific) +- User: \`~/.claude/commands/\` (available in all projects) + +### Command Syntax + +- **Arguments**: Use \`$ARGUMENTS\` for all args or \`$1\`, \`$2\`, etc. 
for positional +- **File Includes**: Use \`@filename\` to include file contents +- **Bash Commands**: Use \`!command\` to execute bash commands + +### Examples + +\`\`\`markdown +/mycommand arg1 arg2 +\`\`\` +`; + + return { + type: 'builtin', + action: 'help', + data: { + content: helpText, + format: 'markdown' + } + }; + }, + + '/clear': async (args, context) => { + return { + type: 'builtin', + action: 'clear', + data: { + message: 'Conversation history cleared' + } + }; + }, + + '/model': async (args, context) => { + // Read available models from centralized constants + const availableModels = { + claude: CLAUDE_MODELS.OPTIONS.map(o => o.value), + cursor: CURSOR_MODELS.OPTIONS.map(o => o.value), + codex: CODEX_MODELS.OPTIONS.map(o => o.value) + }; + + const currentProvider = context?.provider || 'claude'; + const currentModel = context?.model || CLAUDE_MODELS.DEFAULT; + + return { + type: 'builtin', + action: 'model', + data: { + current: { + provider: currentProvider, + model: currentModel + }, + available: availableModels, + message: args.length > 0 + ? `Switching to model: ${args[0]}` + : `Current model: ${currentModel}` + } + }; + }, + + '/cost': async (args, context) => { + const tokenUsage = context?.tokenUsage || {}; + const provider = context?.provider || 'claude'; + const model = + context?.model || + (provider === 'cursor' + ? CURSOR_MODELS.DEFAULT + : provider === 'codex' + ? CODEX_MODELS.DEFAULT + : CLAUDE_MODELS.DEFAULT); + + const used = Number(tokenUsage.used ?? tokenUsage.totalUsed ?? tokenUsage.total_tokens ?? 0) || 0; + const total = + Number( + tokenUsage.total ?? + tokenUsage.contextWindow ?? + parseInt(process.env.CONTEXT_WINDOW || '160000', 10), + ) || 160000; + const percentage = total > 0 ? Number(((used / total) * 100).toFixed(1)) : 0; + + const inputTokensRaw = + Number( + tokenUsage.inputTokens ?? + tokenUsage.input ?? + tokenUsage.cumulativeInputTokens ?? + tokenUsage.promptTokens ?? 
+ 0, + ) || 0; + const outputTokens = + Number( + tokenUsage.outputTokens ?? + tokenUsage.output ?? + tokenUsage.cumulativeOutputTokens ?? + tokenUsage.completionTokens ?? + 0, + ) || 0; + const cacheTokens = + Number( + tokenUsage.cacheReadTokens ?? + tokenUsage.cacheCreationTokens ?? + tokenUsage.cacheTokens ?? + tokenUsage.cachedTokens ?? + 0, + ) || 0; + + // If we only have total used tokens, treat them as input for display/estimation. + const inputTokens = + inputTokensRaw > 0 || outputTokens > 0 || cacheTokens > 0 ? inputTokensRaw + cacheTokens : used; + + // Rough default rates by provider (USD / 1M tokens). + const pricingByProvider = { + claude: { input: 3, output: 15 }, + cursor: { input: 3, output: 15 }, + codex: { input: 1.5, output: 6 }, + }; + const rates = pricingByProvider[provider] || pricingByProvider.claude; + + const inputCost = (inputTokens / 1_000_000) * rates.input; + const outputCost = (outputTokens / 1_000_000) * rates.output; + const totalCost = inputCost + outputCost; + + return { + type: 'builtin', + action: 'cost', + data: { + tokenUsage: { + used, + total, + percentage, + }, + cost: { + input: inputCost.toFixed(4), + output: outputCost.toFixed(4), + total: totalCost.toFixed(4), + }, + model, + }, + }; + }, + + '/status': async (args, context) => { + // Read version from package.json + const packageJsonPath = path.join(path.dirname(__dirname), '..', 'package.json'); + let version = 'unknown'; + let packageName = 'claude-code-ui'; + + try { + const packageJson = JSON.parse(await fs.readFile(packageJsonPath, 'utf8')); + version = packageJson.version; + packageName = packageJson.name; + } catch (err) { + console.error('Error reading package.json:', err); + } + + const uptime = process.uptime(); + const uptimeMinutes = Math.floor(uptime / 60); + const uptimeHours = Math.floor(uptimeMinutes / 60); + const uptimeFormatted = uptimeHours > 0 + ? 
`${uptimeHours}h ${uptimeMinutes % 60}m` + : `${uptimeMinutes}m`; + + return { + type: 'builtin', + action: 'status', + data: { + version, + packageName, + uptime: uptimeFormatted, + uptimeSeconds: Math.floor(uptime), + model: context?.model || 'claude-sonnet-4.5', + provider: context?.provider || 'claude', + nodeVersion: process.version, + platform: process.platform + } + }; + }, + + '/memory': async (args, context) => { + const projectPath = context?.projectPath; + + if (!projectPath) { + return { + type: 'builtin', + action: 'memory', + data: { + error: 'No project selected', + message: 'Please select a project to access its CLAUDE.md file' + } + }; + } + + const claudeMdPath = path.join(projectPath, 'CLAUDE.md'); + + // Check if CLAUDE.md exists + let exists = false; + try { + await fs.access(claudeMdPath); + exists = true; + } catch (err) { + // File doesn't exist + } + + return { + type: 'builtin', + action: 'memory', + data: { + path: claudeMdPath, + exists, + message: exists + ? `Opening CLAUDE.md at ${claudeMdPath}` + : `CLAUDE.md not found at ${claudeMdPath}. Create it to store project-specific instructions.` + } + }; + }, + + '/config': async (args, context) => { + return { + type: 'builtin', + action: 'config', + data: { + message: 'Opening settings...' + } + }; + }, + + '/rewind': async (args, context) => { + const steps = args[0] ? parseInt(args[0]) : 1; + + if (isNaN(steps) || steps < 1) { + return { + type: 'builtin', + action: 'rewind', + data: { + error: 'Invalid steps parameter', + message: 'Usage: /rewind [number] - Rewind conversation by N steps (default: 1)' + } + }; + } + + return { + type: 'builtin', + action: 'rewind', + data: { + steps, + message: `Rewinding conversation by ${steps} step${steps > 1 ? 
/**
 * POST /api/commands/load
 * Load a specific command file and return its content and metadata.
 *
 * Security fix: the previous check (`!startsWith(homedir) && !includes('.claude/commands')`)
 * allowed ANY file under the user's home directory. The path is now resolved
 * first (neutralizing `../` traversal) and must either live under
 * `~/.claude/commands` or contain a `.claude/commands` path segment
 * (project-level commands) — matching the containment check used by /execute.
 */
router.post('/load', async (req, res) => {
  try {
    const { commandPath } = req.body;

    if (!commandPath) {
      return res.status(400).json({
        error: 'Command path is required'
      });
    }

    // Resolve before checking so `..` segments cannot escape the allowed tree.
    const resolvedPath = path.resolve(commandPath);
    const userBase = path.resolve(os.homedir(), '.claude', 'commands');
    const relToUser = path.relative(userBase, resolvedPath);
    const underUserCommands =
      relToUser !== '' && !relToUser.startsWith('..') && !path.isAbsolute(relToUser);
    const commandsMarker = `${path.sep}.claude${path.sep}commands${path.sep}`;

    if (!underUserCommands && !resolvedPath.includes(commandsMarker)) {
      return res.status(403).json({
        error: 'Access denied',
        message: 'Command must be in .claude/commands directory'
      });
    }

    // Read and parse the command file (frontmatter + body).
    const content = await fs.readFile(commandPath, 'utf8');
    const { data: metadata, content: commandContent } = parseFrontmatter(content);

    res.json({
      path: commandPath,
      metadata,
      content: commandContent
    });
  } catch (error) {
    if (error.code === 'ENOENT') {
      return res.status(404).json({
        error: 'Command not found',
        message: `Command file not found: ${req.body.commandPath}`
      });
    }

    console.error('Error loading command:', error);
    res.status(500).json({
      error: 'Failed to load command',
      message: error.message
    });
  }
});
/**
 * POST /api/commands/execute
 * Execute a command with argument replacement.
 *
 * Built-in commands are dispatched to their in-process handler; custom
 * commands are loaded from disk (restricted to `.claude/commands` trees),
 * then `$ARGUMENTS` and positional `$1`, `$2`, ... placeholders are expanded.
 * Bash execution / file includes are left to the client-side command parser.
 */
router.post('/execute', async (req, res) => {
  try {
    const { commandName, commandPath, args = [], context = {} } = req.body;

    if (!commandName) {
      return res.status(400).json({
        error: 'Command name is required'
      });
    }

    // Built-in commands short-circuit here.
    const builtIn = builtInHandlers[commandName];
    if (builtIn) {
      try {
        const result = await builtIn(args, context);
        return res.json({ ...result, command: commandName });
      } catch (error) {
        console.error(`Error executing built-in command ${commandName}:`, error);
        return res.status(500).json({
          error: 'Command execution failed',
          message: error.message,
          command: commandName
        });
      }
    }

    // Anything else is a custom command and needs a file path.
    if (!commandPath) {
      return res.status(400).json({
        error: 'Command path is required for custom commands'
      });
    }

    // Security: the resolved path must sit strictly inside the user-level
    // or project-level .claude/commands directory.
    const resolved = path.resolve(commandPath);
    const allowedBases = [path.resolve(os.homedir(), '.claude', 'commands')];
    if (context?.projectPath) {
      allowedBases.push(path.resolve(context.projectPath, '.claude', 'commands'));
    }
    const within = (base) => {
      const rel = path.relative(base, resolved);
      return rel !== '' && !rel.startsWith('..') && !path.isAbsolute(rel);
    };
    if (!allowedBases.some(within)) {
      return res.status(403).json({
        error: 'Access denied',
        message: 'Command must be in .claude/commands directory'
      });
    }

    const fileText = await fs.readFile(commandPath, 'utf8');
    const { data: metadata, content: commandContent } = parseFrontmatter(fileText);

    // $ARGUMENTS expands to all arguments joined; $1, $2, ... to positionals.
    let expanded = commandContent.replace(/\$ARGUMENTS/g, args.join(' '));
    for (const [index, arg] of args.entries()) {
      expanded = expanded.replace(new RegExp(`\\$${index + 1}\\b`, 'g'), arg);
    }

    res.json({
      type: 'custom',
      command: commandName,
      content: expanded,
      metadata,
      hasFileIncludes: expanded.includes('@'),
      hasBashCommands: expanded.includes('!')
    });
  } catch (error) {
    if (error.code === 'ENOENT') {
      return res.status(404).json({
        error: 'Command not found',
        message: `Command file not found: ${req.body.commandPath}`
      });
    }

    console.error('Error executing command:', error);
    res.status(500).json({
      error: 'Failed to execute command',
      message: error.message
    });
  }
});
// Create a new credential for the authenticated user.
// All three of name/type/value are required non-blank strings; values are
// trimmed before storage and the optional description defaults to null.
router.post('/', async (req, res) => {
  try {
    const { credentialName, credentialType, credentialValue, description } = req.body;

    // Validate the required fields in order so the first missing one is reported.
    const required = [
      [credentialName, 'Credential name is required'],
      [credentialType, 'Credential type is required'],
      [credentialValue, 'Credential value is required']
    ];
    for (const [value, message] of required) {
      if (!value || !value.trim()) {
        return res.status(400).json({ error: message });
      }
    }

    const credential = credentialsDb.createCredential(
      req.user.id,
      credentialName.trim(),
      credentialType.trim(),
      credentialValue.trim(),
      description?.trim() || null
    );

    res.json({
      success: true,
      credential
    });
  } catch (error) {
    console.error('Error creating credential:', error);
    res.status(500).json({ error: 'Failed to create credential' });
  }
});
// Toggle a credential's active flag for the authenticated user.
// Fixes: parseInt now gets an explicit radix, and a non-numeric id is
// rejected as not-found instead of passing NaN through to the DB layer.
router.patch('/:credentialId/toggle', async (req, res) => {
  try {
    const { credentialId } = req.params;
    const { isActive } = req.body;

    if (typeof isActive !== 'boolean') {
      return res.status(400).json({ error: 'isActive must be a boolean' });
    }

    const id = Number.parseInt(credentialId, 10);
    if (Number.isNaN(id)) {
      // A malformed id can never match a stored credential.
      return res.status(404).json({ error: 'Credential not found' });
    }

    const success = credentialsDb.toggleCredential(req.user.id, id, isActive);

    if (success) {
      res.json({ success: true });
    } else {
      res.status(404).json({ error: 'Credential not found' });
    }
  } catch (error) {
    console.error('Error toggling credential:', error);
    res.status(500).json({ error: 'Failed to toggle credential' });
  }
});
// POST /api/cursor/config - Update Cursor CLI configuration.
// Loads the existing ~/.cursor/cli-config.json (or a default skeleton),
// overlays the provided permissions and/or model, and writes it back.
router.post('/config', async (req, res) => {
  try {
    const { permissions, model } = req.body;
    const configPath = path.join(os.homedir(), '.cursor', 'cli-config.json');

    // Default skeleton used when no config exists on disk yet.
    let config = {
      version: 1,
      editor: {
        vimMode: false
      },
      hasChangedDefaultModel: false,
      privacyCache: {
        ghostMode: false,
        privacyMode: 3,
        updatedAt: Date.now()
      }
    };

    try {
      config = JSON.parse(await fs.readFile(configPath, 'utf8'));
    } catch (error) {
      console.log('Creating new Cursor config');
    }

    if (permissions) {
      config.permissions = {
        allow: permissions.allow || [],
        deny: permissions.deny || []
      };
    }

    if (model) {
      config.model = model;
      // Record that the user explicitly picked a model.
      config.hasChangedDefaultModel = true;
    }

    await fs.mkdir(path.dirname(configPath), { recursive: true });
    await fs.writeFile(configPath, JSON.stringify(config, null, 2));

    res.json({
      success: true,
      config,
      message: 'Cursor configuration updated successfully'
    });
  } catch (error) {
    console.error('Error updating Cursor config:', error);
    res.status(500).json({
      error: 'Failed to update Cursor configuration',
      details: error.message
    });
  }
});
path.join(os.homedir(), '.cursor', 'mcp.json'); + + try { + const mcpContent = await fs.readFile(mcpPath, 'utf8'); + const mcpConfig = JSON.parse(mcpContent); + + // Convert to UI-friendly format + const servers = []; + if (mcpConfig.mcpServers && typeof mcpConfig.mcpServers === 'object') { + for (const [name, config] of Object.entries(mcpConfig.mcpServers)) { + const server = { + id: name, + name: name, + type: 'stdio', + scope: 'cursor', + config: {}, + raw: config + }; + + // Determine transport type and extract config + if (config.command) { + server.type = 'stdio'; + server.config.command = config.command; + server.config.args = config.args || []; + server.config.env = config.env || {}; + } else if (config.url) { + server.type = config.transport || 'http'; + server.config.url = config.url; + server.config.headers = config.headers || {}; + } + + servers.push(server); + } + } + + res.json({ + success: true, + servers: servers, + path: mcpPath + }); + } catch (error) { + // MCP config doesn't exist + console.log('Cursor MCP config not found:', error.message); + res.json({ + success: true, + servers: [], + isDefault: true + }); + } + } catch (error) { + console.error('Error reading Cursor MCP config:', error); + res.status(500).json({ + error: 'Failed to read Cursor MCP configuration', + details: error.message + }); + } +}); + +// POST /api/cursor/mcp/add - Add MCP server to Cursor configuration +router.post('/mcp/add', async (req, res) => { + try { + const { name, type = 'stdio', command, args = [], url, headers = {}, env = {} } = req.body; + const mcpPath = path.join(os.homedir(), '.cursor', 'mcp.json'); + + console.log(`➕ Adding MCP server to Cursor config: ${name}`); + + // Read existing config or create new + let mcpConfig = { mcpServers: {} }; + + try { + const existing = await fs.readFile(mcpPath, 'utf8'); + mcpConfig = JSON.parse(existing); + if (!mcpConfig.mcpServers) { + mcpConfig.mcpServers = {}; + } + } catch (error) { + console.log('Creating new 
Cursor MCP config'); + } + + // Build server config based on type + let serverConfig = {}; + + if (type === 'stdio') { + serverConfig = { + command: command, + args: args, + env: env + }; + } else if (type === 'http' || type === 'sse') { + serverConfig = { + url: url, + transport: type, + headers: headers + }; + } + + // Add server to config + mcpConfig.mcpServers[name] = serverConfig; + + // Ensure directory exists + const mcpDir = path.dirname(mcpPath); + await fs.mkdir(mcpDir, { recursive: true }); + + // Write updated config + await fs.writeFile(mcpPath, JSON.stringify(mcpConfig, null, 2)); + + res.json({ + success: true, + message: `MCP server "${name}" added to Cursor configuration`, + config: mcpConfig + }); + } catch (error) { + console.error('Error adding MCP server to Cursor:', error); + res.status(500).json({ + error: 'Failed to add MCP server', + details: error.message + }); + } +}); + +// DELETE /api/cursor/mcp/:name - Remove MCP server from Cursor configuration +router.delete('/mcp/:name', async (req, res) => { + try { + const { name } = req.params; + const mcpPath = path.join(os.homedir(), '.cursor', 'mcp.json'); + + console.log(`🗑️ Removing MCP server from Cursor config: ${name}`); + + // Read existing config + let mcpConfig = { mcpServers: {} }; + + try { + const existing = await fs.readFile(mcpPath, 'utf8'); + mcpConfig = JSON.parse(existing); + } catch (error) { + return res.status(404).json({ + error: 'Cursor MCP configuration not found' + }); + } + + // Check if server exists + if (!mcpConfig.mcpServers || !mcpConfig.mcpServers[name]) { + return res.status(404).json({ + error: `MCP server "${name}" not found in Cursor configuration` + }); + } + + // Remove server from config + delete mcpConfig.mcpServers[name]; + + // Write updated config + await fs.writeFile(mcpPath, JSON.stringify(mcpConfig, null, 2)); + + res.json({ + success: true, + message: `MCP server "${name}" removed from Cursor configuration`, + config: mcpConfig + }); + } catch 
// POST /api/cursor/mcp/add-json - Add an MCP server using a raw JSON config.
// Fixes: a missing `name` previously stored the server under the literal key
// "undefined", and non-object JSON (e.g. "null", "[1,2]") was written as-is;
// both are now rejected with 400 before touching the config file.
router.post('/mcp/add-json', async (req, res) => {
  try {
    const { name, jsonConfig } = req.body;
    const mcpPath = path.join(os.homedir(), '.cursor', 'mcp.json');

    if (!name || typeof name !== 'string' || !name.trim()) {
      return res.status(400).json({ error: 'Server name is required' });
    }

    console.log(`➕ Adding MCP server to Cursor config via JSON: ${name}`);

    // Accept either a JSON string or an already-parsed object.
    let parsedConfig;
    try {
      parsedConfig = typeof jsonConfig === 'string' ? JSON.parse(jsonConfig) : jsonConfig;
    } catch (parseError) {
      return res.status(400).json({
        error: 'Invalid JSON configuration',
        details: parseError.message
      });
    }

    if (!parsedConfig || typeof parsedConfig !== 'object' || Array.isArray(parsedConfig)) {
      return res.status(400).json({
        error: 'Invalid JSON configuration',
        details: 'Configuration must be a JSON object'
      });
    }

    // Read existing config or start a fresh one.
    let mcpConfig = { mcpServers: {} };
    try {
      const existing = await fs.readFile(mcpPath, 'utf8');
      mcpConfig = JSON.parse(existing);
      if (!mcpConfig.mcpServers) {
        mcpConfig.mcpServers = {};
      }
    } catch (error) {
      console.log('Creating new Cursor MCP config');
    }

    mcpConfig.mcpServers[name] = parsedConfig;

    await fs.mkdir(path.dirname(mcpPath), { recursive: true });
    await fs.writeFile(mcpPath, JSON.stringify(mcpConfig, null, 2));

    res.json({
      success: true,
      message: `MCP server "${name}" added to Cursor configuration via JSON`,
      config: mcpConfig
    });
  } catch (error) {
    console.error('Error adding MCP server to Cursor via JSON:', error);
    res.status(500).json({
      error: 'Failed to add MCP server',
      details: error.message
    });
  }
});
crypto.createHash('md5').update(projectPath || process.cwd()).digest('hex'); + const cursorChatsPath = path.join(os.homedir(), '.cursor', 'chats', cwdId); + + + // Check if the directory exists + try { + await fs.access(cursorChatsPath); + } catch (error) { + // No sessions for this project + return res.json({ + success: true, + sessions: [], + cwdId: cwdId, + path: cursorChatsPath + }); + } + + // List all session directories + const sessionDirs = await fs.readdir(cursorChatsPath); + const sessions = []; + + for (const sessionId of sessionDirs) { + const sessionPath = path.join(cursorChatsPath, sessionId); + const storeDbPath = path.join(sessionPath, 'store.db'); + let dbStatMtimeMs = null; + + try { + // Check if store.db exists + await fs.access(storeDbPath); + + // Capture store.db mtime as a reliable fallback timestamp (last activity) + try { + const stat = await fs.stat(storeDbPath); + dbStatMtimeMs = stat.mtimeMs; + } catch (_) {} + + // Open SQLite database + const db = await open({ + filename: storeDbPath, + driver: sqlite3.Database, + mode: sqlite3.OPEN_READONLY + }); + + // Get metadata from meta table + const metaRows = await db.all(` + SELECT key, value FROM meta + `); + + let sessionData = { + id: sessionId, + name: 'Untitled Session', + createdAt: null, + mode: null, + projectPath: projectPath, + lastMessage: null, + messageCount: 0 + }; + + // Parse meta table entries + for (const row of metaRows) { + if (row.value) { + try { + // Try to decode as hex-encoded JSON + const hexMatch = row.value.toString().match(/^[0-9a-fA-F]+$/); + if (hexMatch) { + const jsonStr = Buffer.from(row.value, 'hex').toString('utf8'); + const data = JSON.parse(jsonStr); + + if (row.key === 'agent') { + sessionData.name = data.name || sessionData.name; + // Normalize createdAt to ISO string in milliseconds + let createdAt = data.createdAt; + if (typeof createdAt === 'number') { + if (createdAt < 1e12) { + createdAt = createdAt * 1000; // seconds -> ms + } + 
sessionData.createdAt = new Date(createdAt).toISOString(); + } else if (typeof createdAt === 'string') { + const n = Number(createdAt); + if (!Number.isNaN(n)) { + const ms = n < 1e12 ? n * 1000 : n; + sessionData.createdAt = new Date(ms).toISOString(); + } else { + // Assume it's already an ISO/date string + const d = new Date(createdAt); + sessionData.createdAt = isNaN(d.getTime()) ? null : d.toISOString(); + } + } else { + sessionData.createdAt = sessionData.createdAt || null; + } + sessionData.mode = data.mode; + sessionData.agentId = data.agentId; + sessionData.latestRootBlobId = data.latestRootBlobId; + } + } else { + // If not hex, use raw value for simple keys + if (row.key === 'name') { + sessionData.name = row.value.toString(); + } + } + } catch (e) { + console.log(`Could not parse meta value for key ${row.key}:`, e.message); + } + } + } + + // Get message count from JSON blobs only (actual messages, not DAG structure) + try { + const blobCount = await db.get(` + SELECT COUNT(*) as count + FROM blobs + WHERE substr(data, 1, 1) = X'7B' + `); + sessionData.messageCount = blobCount.count; + + // Get the most recent JSON blob for preview (actual message, not DAG structure) + const lastBlob = await db.get(` + SELECT data FROM blobs + WHERE substr(data, 1, 1) = X'7B' + ORDER BY rowid DESC + LIMIT 1 + `); + + if (lastBlob && lastBlob.data) { + try { + // Try to extract readable preview from blob (may contain binary with embedded JSON) + const raw = lastBlob.data.toString('utf8'); + let preview = ''; + // Attempt direct JSON parse + try { + const parsed = JSON.parse(raw); + if (parsed?.content) { + if (Array.isArray(parsed.content)) { + const firstText = parsed.content.find(p => p?.type === 'text' && p.text)?.text || ''; + preview = firstText; + } else if (typeof parsed.content === 'string') { + preview = parsed.content; + } + } + } catch (_) {} + if (!preview) { + // Strip non-printable and try to find JSON chunk + const cleaned = 
raw.replace(/[^\x09\x0A\x0D\x20-\x7E]/g, ''); + const s = cleaned; + const start = s.indexOf('{'); + const end = s.lastIndexOf('}'); + if (start !== -1 && end > start) { + const jsonStr = s.slice(start, end + 1); + try { + const parsed = JSON.parse(jsonStr); + if (parsed?.content) { + if (Array.isArray(parsed.content)) { + const firstText = parsed.content.find(p => p?.type === 'text' && p.text)?.text || ''; + preview = firstText; + } else if (typeof parsed.content === 'string') { + preview = parsed.content; + } + } + } catch (_) { + preview = s; + } + } else { + preview = s; + } + } + if (preview && preview.length > 0) { + sessionData.lastMessage = preview.substring(0, 100) + (preview.length > 100 ? '...' : ''); + } + } catch (e) { + console.log('Could not parse blob data:', e.message); + } + } + } catch (e) { + console.log('Could not read blobs:', e.message); + } + + await db.close(); + + // Finalize createdAt: use parsed meta value when valid, else fall back to store.db mtime + if (!sessionData.createdAt) { + if (dbStatMtimeMs && Number.isFinite(dbStatMtimeMs)) { + sessionData.createdAt = new Date(dbStatMtimeMs).toISOString(); + } + } + + sessions.push(sessionData); + + } catch (error) { + console.log(`Could not read session ${sessionId}:`, error.message); + } + } + + // Fallback: ensure createdAt is a valid ISO string (use session directory mtime as last resort) + for (const s of sessions) { + if (!s.createdAt) { + try { + const sessionDir = path.join(cursorChatsPath, s.id); + const st = await fs.stat(sessionDir); + s.createdAt = new Date(st.mtimeMs).toISOString(); + } catch { + s.createdAt = new Date().toISOString(); + } + } + } + // Sort sessions by creation date (newest first) + sessions.sort((a, b) => { + if (!a.createdAt) return 1; + if (!b.createdAt) return -1; + return new Date(b.createdAt) - new Date(a.createdAt); + }); + + applyCustomSessionNames(sessions, 'cursor'); + + res.json({ + success: true, + sessions: sessions, + cwdId: cwdId, + path: 
        cursorChatsPath
      });

  } catch (error) {
    console.error('Error reading Cursor sessions:', error);
    res.status(500).json({
      error: 'Failed to read Cursor sessions',
      details: error.message
    });
  }
});

// GET /api/cursor/sessions/:sessionId - Get specific Cursor session from SQLite
//
// Cursor stores each chat session in ~/.cursor/chats/<md5(cwd)>/<sessionId>/store.db.
// The `blobs` table mixes two kinds of rows:
//   - JSON blobs (start with '{', byte 0x7B): the actual chat messages
//   - protobuf blobs: DAG structure nodes that reference parents by 32-byte hash
// This handler reconstructs message order by topologically sorting the DAG and
// then ordering JSON messages by where the DAG first references them.
// NOTE(review): `open`, `sqlite3`, `crypto`, `path` and `os` are presumably
// imported at the top of this module (not visible in this chunk) — confirm.
router.get('/sessions/:sessionId', async (req, res) => {
  try {
    const { sessionId } = req.params;
    const { projectPath } = req.query;

    // Calculate cwdID hash for the project path
    const cwdId = crypto.createHash('md5').update(projectPath || process.cwd()).digest('hex');
    const storeDbPath = path.join(os.homedir(), '.cursor', 'chats', cwdId, sessionId, 'store.db');

    // Open SQLite database (read-only: this endpoint never mutates the store)
    const db = await open({
      filename: storeDbPath,
      driver: sqlite3.Database,
      mode: sqlite3.OPEN_READONLY
    });

    // Get all blobs to build the DAG structure
    const allBlobs = await db.all(`
      SELECT rowid, id, data FROM blobs
    `);

    // Build the DAG structure from parent-child relationships
    const blobMap = new Map(); // id -> blob data
    const parentRefs = new Map(); // blob id -> [parent blob ids]
    const childRefs = new Map(); // blob id -> [child blob ids]
    const jsonBlobs = []; // Clean JSON messages

    for (const blob of allBlobs) {
      blobMap.set(blob.id, blob);

      // Check if this is a JSON blob (actual message) or protobuf (DAG structure)
      if (blob.data && blob.data[0] === 0x7B) { // Starts with '{' - JSON blob
        try {
          const parsed = JSON.parse(blob.data.toString('utf8'));
          jsonBlobs.push({ ...blob, parsed });
        } catch (e) {
          console.log('Failed to parse JSON blob:', blob.rowid);
        }
      } else if (blob.data) { // Protobuf blob - extract parent references
        const parents = [];
        let i = 0;

        // Scan for parent references (0x0A 0x20 followed by 32-byte hash).
        // NOTE(review): only hashes already seen in blobMap are accepted, so a
        // parent stored at a later rowid would be missed — assumed not to occur.
        while (i < blob.data.length - 33) {
          if (blob.data[i] === 0x0A && blob.data[i+1] === 0x20) {
            const parentHash = blob.data.slice(i+2, i+34).toString('hex');
            if (blobMap.has(parentHash)) {
              parents.push(parentHash);
            }
            i += 34;
          } else {
            i++;
          }
        }

        if (parents.length > 0) {
          parentRefs.set(blob.id, parents);
          // Update child references
          for (const parentId of parents) {
            if (!childRefs.has(parentId)) {
              childRefs.set(parentId, []);
            }
            childRefs.get(parentId).push(blob.id);
          }
        }
      }
    }

    // Perform topological sort to get chronological order
    const visited = new Set();
    const sorted = [];

    // DFS-based topological sort: every node is emitted only after all of its
    // parents, so `sorted` is a valid linearization of the DAG.
    function visit(nodeId) {
      if (visited.has(nodeId)) return;
      visited.add(nodeId);

      // Visit all parents first (dependencies)
      const parents = parentRefs.get(nodeId) || [];
      for (const parentId of parents) {
        visit(parentId);
      }

      // Add this node after all its parents
      const blob = blobMap.get(nodeId);
      if (blob) {
        sorted.push(blob);
      }
    }

    // Start with nodes that have no parents (roots)
    for (const blob of allBlobs) {
      if (!parentRefs.has(blob.id)) {
        visit(blob.id);
      }
    }

    // Visit any remaining nodes (disconnected components)
    for (const blob of allBlobs) {
      visit(blob.id);
    }

    // Now extract JSON messages in the order they appear in the sorted DAG:
    // a JSON message's position is the first protobuf node whose payload
    // embeds that message's id bytes.
    const messageOrder = new Map(); // JSON blob id -> order index
    let orderIndex = 0;

    for (const blob of sorted) {
      // Check if this blob references any JSON messages
      if (blob.data && blob.data[0] !== 0x7B) { // Protobuf blob
        // Look for JSON blob references
        for (const jsonBlob of jsonBlobs) {
          try {
            const jsonIdBytes = Buffer.from(jsonBlob.id, 'hex');
            if (blob.data.includes(jsonIdBytes)) {
              if (!messageOrder.has(jsonBlob.id)) {
                messageOrder.set(jsonBlob.id, orderIndex++);
              }
            }
          } catch (e) {
            // Skip if can't convert ID
          }
        }
      }
    }

    // Sort JSON blobs by their appearance order in the DAG
    const sortedJsonBlobs = jsonBlobs.sort((a, b) => {
      const orderA = messageOrder.get(a.id) ?? Number.MAX_SAFE_INTEGER;
      const orderB = messageOrder.get(b.id) ?? Number.MAX_SAFE_INTEGER;
      if (orderA !== orderB) return orderA - orderB;
      // Fallback to rowid if not in order map
      return a.rowid - b.rowid;
    });

    // Use sorted JSON blobs; keep the original rowid for debugging/traceability
    const blobs = sortedJsonBlobs.map((blob, idx) => ({
      ...blob,
      sequence_num: idx + 1,
      original_rowid: blob.rowid
    }));

    // Get metadata from meta table
    const metaRows = await db.all(`
      SELECT key, value FROM meta
    `);

    // Parse metadata: values are usually hex-encoded JSON, otherwise raw strings
    let metadata = {};
    for (const row of metaRows) {
      if (row.value) {
        try {
          // Try to decode as hex-encoded JSON
          const hexMatch = row.value.toString().match(/^[0-9a-fA-F]+$/);
          if (hexMatch) {
            const jsonStr = Buffer.from(row.value, 'hex').toString('utf8');
            metadata[row.key] = JSON.parse(jsonStr);
          } else {
            metadata[row.key] = row.value.toString();
          }
        } catch (e) {
          metadata[row.key] = row.value.toString();
        }
      }
    }

    // Extract messages from sorted JSON blobs
    const messages = [];
    for (const blob of blobs) {
      try {
        // We already parsed JSON blobs earlier
        const parsed = blob.parsed;

        if (parsed) {
          // Filter out ONLY system messages at the server level
          // Check both direct role and nested message.role
          const role = parsed?.role || parsed?.message?.role;
          if (role === 'system') {
            continue; // Skip only system messages
          }
          messages.push({
            id: blob.id,
            sequence: blob.sequence_num,
            rowid: blob.original_rowid,
            content: parsed
          });
        }
      } catch (e) {
        // Skip blobs that cause errors
        console.log(`Skipping blob ${blob.id}: ${e.message}`);
      }
    }

    await db.close();

    res.json({
      success: true,
      session: {
        id: sessionId,
        projectPath: projectPath,
        messages: messages,
        metadata: metadata,
        cwdId: cwdId
      }
    });

  } catch (error) {
    console.error('Error reading Cursor session:', error);
    res.status(500).json({
      error: 'Failed to read Cursor session',
      details: error.message
    });
  }
});

export default router;
import express from 'express';
import sessionManager from '../../../sessionManager.js';
import { sessionNamesDb } from '../../../database/db.js';

const router = express.Router();

// Session IDs must be short, filesystem-safe tokens.
const SESSION_ID_PATTERN = /^[a-zA-Z0-9_.-]{1,100}$/;

/**
 * DELETE /sessions/:sessionId
 * Removes a Gemini CLI session and drops its stored custom display name.
 * Responds 400 for malformed ids, 500 if deletion fails.
 */
router.delete('/sessions/:sessionId', async (req, res) => {
  const { sessionId } = req.params;

  const isValidId = typeof sessionId === 'string' && SESSION_ID_PATTERN.test(sessionId);
  if (!isValidId) {
    return res.status(400).json({ success: false, error: 'Invalid session ID format' });
  }

  try {
    await sessionManager.deleteSession(sessionId);
    sessionNamesDb.deleteName(sessionId, 'gemini');
    res.json({ success: true });
  } catch (error) {
    console.error(`Error deleting Gemini session ${req.params.sessionId}:`, error);
    res.status(500).json({ success: false, error: error.message });
  }
});

export default router;
/**
 * Promise wrapper around child_process.spawn.
 * Resolves with { stdout, stderr } on exit code 0; otherwise rejects with an
 * Error carrying .code, .stdout and .stderr. `shell` is forced off so
 * arguments are never interpreted by a shell.
 */
function spawnAsync(command, args, options = {}) {
  return new Promise((resolve, reject) => {
    const child = spawn(command, args, {
      ...options,
      shell: false,
    });

    let stdout = '';
    let stderr = '';

    child.stdout.on('data', (data) => {
      stdout += data.toString();
    });

    child.stderr.on('data', (data) => {
      stderr += data.toString();
    });

    child.on('error', (error) => {
      reject(error);
    });

    child.on('close', (code) => {
      if (code === 0) {
        resolve({ stdout, stderr });
        return;
      }

      const error = new Error(`Command failed: ${command} ${args.join(' ')}`);
      error.code = code;
      error.stdout = stdout;
      error.stderr = stderr;
      reject(error);
    });
  });
}

// Input validation helpers (defense-in-depth)

/**
 * Validate a git revision reference (hex hash, HEAD, HEAD~N/^N, tag or branch).
 * FIX: also reject non-strings (regex.test coerces, so `undefined` used to
 * pass as the string "undefined") and any value starting with '-', which git
 * would otherwise parse as a command-line option (argv option injection).
 * @throws {Error} when the reference is not safe to pass to git
 */
function validateCommitRef(commit) {
  if (typeof commit !== 'string' || commit.startsWith('-') || !/^[a-zA-Z0-9._~^{}@\/-]+$/.test(commit)) {
    throw new Error('Invalid commit reference');
  }
  return commit;
}

/**
 * Validate a branch name before it reaches git's argv.
 * FIX: reject non-strings and leading '-' (e.g. "-D", "--force") to prevent
 * option injection; the character class alone allowed those.
 * @throws {Error} when the branch name is invalid
 */
function validateBranchName(branch) {
  if (typeof branch !== 'string' || branch.startsWith('-') || !/^[a-zA-Z0-9._\/-]+$/.test(branch)) {
    throw new Error('Invalid branch name');
  }
  return branch;
}

/**
 * Validate a repository-relative file path.
 * Rejects empty values and embedded NUL bytes; when projectPath is supplied,
 * also rejects paths that resolve outside the project root (path traversal).
 * @throws {Error} when the path is invalid or escapes the project
 */
function validateFilePath(file, projectPath) {
  if (!file || file.includes('\0')) {
    throw new Error('Invalid file path');
  }
  // Prevent path traversal: resolve the file relative to the project root
  // and ensure the result stays within the project directory
  if (projectPath) {
    const resolved = path.resolve(projectPath, file);
    const normalizedRoot = path.resolve(projectPath) + path.sep;
    if (!resolved.startsWith(normalizedRoot) && resolved !== path.resolve(projectPath)) {
      throw new Error('Invalid file path: path traversal detected');
    }
  }
  return file;
}

/**
 * Validate a git remote name (letters, digits, dot, underscore, dash only).
 * @throws {Error} when the remote name is invalid
 */
function validateRemoteName(remote) {
  if (!/^[a-zA-Z0-9._-]+$/.test(remote)) {
    throw new Error('Invalid remote name');
  }
  return remote;
}

/**
 * Validate and normalize a project path.
 * Rejects empty values, NUL bytes, non-absolute results, and the filesystem
 * root; returns the resolved absolute path.
 * @throws {Error} when the project path is unusable
 */
function validateProjectPath(projectPath) {
  if (!projectPath || projectPath.includes('\0')) {
    throw new Error('Invalid project path');
  }
  const resolved = path.resolve(projectPath);
  // Must be an absolute path after resolution
  if (!path.isAbsolute(resolved)) {
    throw new Error('Invalid project path: must be absolute');
  }
  // Block obviously dangerous paths
  if (resolved === '/' || resolved === path.sep) {
    throw new Error('Invalid project path: root directory not allowed');
  }
  return resolved;
}
/**
 * Resolve the real filesystem path for an encoded project name and validate it.
 * @throws {Error} when the project cannot be resolved or the path is unsafe
 */
async function getActualProjectPath(projectName) {
  let projectPath;
  try {
    projectPath = await extractProjectDirectory(projectName);
  } catch (error) {
    console.error(`Error extracting project directory for ${projectName}:`, error);
    throw new Error(`Unable to resolve project path for "${projectName}"`);
  }
  return validateProjectPath(projectPath);
}

/**
 * Strip the file-level header lines from a single-file unified diff, keeping
 * everything from the first '@@' hunk header onwards.
 *
 * FIX: the previous version kept skipping header-looking lines even inside
 * hunks, so genuine content — e.g. the deletion of a line starting with "--",
 * which renders as "---…" — was silently dropped. Inside hunks every line is
 * content ('+', '-', ' ', '\' or another '@@') and must be preserved verbatim.
 */
function stripDiffHeaders(diff) {
  if (!diff) return '';

  const lines = diff.split('\n');
  const filteredLines = [];
  let insideHunks = false;

  for (const line of lines) {
    if (insideHunks) {
      filteredLines.push(line);
      continue;
    }
    if (line.startsWith('@@')) {
      insideHunks = true;
      filteredLines.push(line);
    }
    // Everything before the first '@@' is file-level metadata
    // (diff --git, index, mode changes, ---/+++ paths) and is dropped.
  }

  return filteredLines.join('\n');
}

/**
 * Ensure projectPath exists and lies inside a git work tree.
 * @throws {Error} with a user-facing message when either check fails
 */
async function validateGitRepository(projectPath) {
  try {
    // Check if directory exists
    await fs.access(projectPath);
  } catch {
    throw new Error(`Project path not found: ${projectPath}`);
  }

  try {
    // Allow any directory that is inside a work tree (repo root or nested folder).
    const { stdout: insideWorkTreeOutput } = await spawnAsync('git', ['rev-parse', '--is-inside-work-tree'], { cwd: projectPath });
    const isInsideWorkTree = insideWorkTreeOutput.trim() === 'true';
    if (!isInsideWorkTree) {
      throw new Error('Not inside a git work tree');
    }

    // Ensure git can resolve the repository root for this directory.
    await spawnAsync('git', ['rev-parse', '--show-toplevel'], { cwd: projectPath });
  } catch {
    throw new Error('Not a git repository. This directory does not contain a .git folder. Initialize a git repository with "git init" to use source control features.');
  }
}
/**
 * Flatten an error's message/stderr/stdout into one space-separated string
 * for substring inspection. Missing pieces become empty strings.
 */
function getGitErrorDetails(error) {
  const parts = [error?.message || '', error?.stderr || '', error?.stdout || ''];
  return parts.join(' ');
}

/**
 * True when the error output indicates HEAD does not resolve to a revision
 * (typically a repository with no commits yet).
 */
function isMissingHeadRevisionError(error) {
  const details = getGitErrorDetails(error).toLowerCase();
  const markers = [
    'unknown revision',
    'ambiguous argument',
    'needed a single revision',
    'bad revision',
  ];
  return markers.some((marker) => details.includes(marker));
}

/**
 * Current branch name for the repository at projectPath.
 * Prefers symbolic-ref (works in repositories with no commits); falls back to
 * rev-parse for detached HEAD and older git edge cases.
 */
async function getCurrentBranchName(projectPath) {
  try {
    const symbolicRef = await spawnAsync('git', ['symbolic-ref', '--short', 'HEAD'], { cwd: projectPath });
    const symbolicName = symbolicRef.stdout.trim();
    if (symbolicName) {
      return symbolicName;
    }
  } catch (error) {
    // Ignore and fall through to rev-parse below.
  }

  const revParse = await spawnAsync('git', ['rev-parse', '--abbrev-ref', 'HEAD'], { cwd: projectPath });
  return revParse.stdout.trim();
}
/**
 * True when the repository at projectPath has at least one commit.
 * A missing-HEAD error means "no commits yet"; any other failure is rethrown.
 */
async function repositoryHasCommits(projectPath) {
  try {
    await spawnAsync('git', ['rev-parse', '--verify', 'HEAD'], { cwd: projectPath });
    return true;
  } catch (error) {
    if (isMissingHeadRevisionError(error)) {
      return false;
    }
    throw error;
  }
}

/** Absolute path of the repository's top-level directory. */
async function getRepositoryRootPath(projectPath) {
  const result = await spawnAsync('git', ['rev-parse', '--show-toplevel'], { cwd: projectPath });
  return result.stdout.trim();
}

/**
 * Normalize a file path to repo-relative form: forward slashes only,
 * no leading "./" or "/", surrounding whitespace trimmed.
 */
function normalizeRepositoryRelativeFilePath(filePath) {
  let normalized = String(filePath).replace(/\\/g, '/');
  normalized = normalized.replace(/^\.\/+/, '');
  normalized = normalized.replace(/^\/+/, '');
  return normalized.trim();
}

/**
 * Extract the changed file paths from `git status --porcelain` output.
 * Rename lines ("R  old -> new") yield the new path.
 */
function parseStatusFilePaths(statusOutput) {
  const filePaths = [];
  for (const rawLine of statusOutput.split('\n')) {
    const line = rawLine.trimEnd();
    if (!line.trim()) {
      continue;
    }
    const statusPath = line.substring(3);
    const renameTarget = statusPath.split(' -> ')[1];
    const normalized = normalizeRepositoryRelativeFilePath(renameTarget || statusPath);
    if (normalized) {
      filePaths.push(normalized);
    }
  }
  return filePaths;
}

/**
 * Candidate repo-relative paths for a client-supplied file path: the path as
 * given, plus (when the project lives in a subdirectory of the repo and the
 * path is not already prefixed) the same path under that subdirectory.
 * Returned deduplicated, falsy entries removed, order preserved.
 */
function buildFilePathCandidates(projectPath, repositoryRootPath, filePath) {
  const normalizedFilePath = normalizeRepositoryRelativeFilePath(filePath);
  const projectRelativePath = normalizeRepositoryRelativeFilePath(path.relative(repositoryRootPath, projectPath));

  const candidates = [normalizedFilePath];
  const needsProjectPrefix =
    projectRelativePath
    && projectRelativePath !== '.'
    && !normalizedFilePath.startsWith(`${projectRelativePath}/`);
  if (needsProjectPrefix) {
    candidates.push(`${projectRelativePath}/${normalizedFilePath}`);
  }

  const uniqueCandidates = new Set(candidates.filter(Boolean));
  return [...uniqueCandidates];
}
/**
 * Map a client-supplied file path to { repositoryRootPath, repositoryRelativeFilePath }.
 * Tries the path as given (and prefixed with the project's subdirectory),
 * falling back to a unique suffix match against changed files for bare
 * filenames; otherwise returns the first candidate unchanged.
 */
async function resolveRepositoryFilePath(projectPath, filePath) {
  // FIX: pass projectPath so validateFilePath's path-traversal check actually
  // runs — with one argument only the null-byte check was applied.
  validateFilePath(filePath, projectPath);

  const repositoryRootPath = await getRepositoryRootPath(projectPath);
  const candidateFilePaths = buildFilePathCandidates(projectPath, repositoryRootPath, filePath);

  for (const candidateFilePath of candidateFilePaths) {
    const { stdout } = await spawnAsync('git', ['status', '--porcelain', '--', candidateFilePath], { cwd: repositoryRootPath });
    if (stdout.trim()) {
      return {
        repositoryRootPath,
        repositoryRelativeFilePath: candidateFilePath,
      };
    }
  }

  // If the caller sent a bare filename (e.g. "hello.ts"), recover it from changed files.
  const normalizedFilePath = normalizeRepositoryRelativeFilePath(filePath);
  if (!normalizedFilePath.includes('/')) {
    const { stdout: repositoryStatusOutput } = await spawnAsync('git', ['status', '--porcelain'], { cwd: repositoryRootPath });
    const changedFilePaths = parseStatusFilePaths(repositoryStatusOutput);
    const suffixMatches = changedFilePaths.filter(
      (changedFilePath) => changedFilePath === normalizedFilePath || changedFilePath.endsWith(`/${normalizedFilePath}`),
    );

    // Only accept an unambiguous match.
    if (suffixMatches.length === 1) {
      return {
        repositoryRootPath,
        repositoryRelativeFilePath: suffixMatches[0],
      };
    }
  }

  return {
    repositoryRootPath,
    repositoryRelativeFilePath: candidateFilePaths[0],
  };
}

/**
 * GET /status — branch name, commit presence, and changed files for a project.
 * Responds with { branch, hasCommits, modified, added, deleted, untracked };
 * on failure responds 200 with an { error, details } body (legacy contract).
 */
router.get('/status', async (req, res) => {
  const { project } = req.query;

  if (!project) {
    return res.status(400).json({ error: 'Project name is required' });
  }

  try {
    const projectPath = await getActualProjectPath(project);

    // Validate git repository
    await validateGitRepository(projectPath);

    const branch = await getCurrentBranchName(projectPath);
    const hasCommits = await repositoryHasCommits(projectPath);

    // Get git status
    const { stdout: statusOutput } = await spawnAsync('git', ['status', '--porcelain'], { cwd: projectPath });

    const modified = [];
    const added = [];
    const deleted = [];
    const untracked = [];

    statusOutput.split('\n').forEach(line => {
      if (!line.trim()) return;

      // Porcelain format: two status chars, a space, then the path.
      const status = line.substring(0, 2);
      const file = line.substring(3);

      if (status === 'M ' || status === ' M' || status === 'MM') {
        modified.push(file);
      } else if (status === 'A ' || status === 'AM') {
        added.push(file);
      } else if (status === 'D ' || status === ' D') {
        deleted.push(file);
      } else if (status === '??') {
        untracked.push(file);
      }
      // NOTE(review): other porcelain codes (R, C, U, …) are ignored — confirm
      // whether renames/conflicts should surface here.
    });

    res.json({
      branch,
      hasCommits,
      modified,
      added,
      deleted,
      untracked
    });
  } catch (error) {
    console.error('Git status error:', error);
    res.json({
      error: error.message.includes('not a git repository') || error.message.includes('Project directory is not a git repository')
        ? error.message
        : 'Git operation failed',
      details: error.message.includes('not a git repository') || error.message.includes('Project directory is not a git repository')
        ? error.message
        : `Failed to get git status: ${error.message}`
    });
  }
});
// Get diff for a specific file
//
// GET /diff — unified diff for a single file.
// Untracked files are rendered as an all-additions pseudo-diff built from the
// file content; deleted files as an all-removals pseudo-diff from HEAD;
// tracked files show unstaged changes first and fall back to staged
// (index vs HEAD) changes. Errors are returned as 200 + { error } (legacy contract).
router.get('/diff', async (req, res) => {
  const { project, file } = req.query;

  if (!project || !file) {
    return res.status(400).json({ error: 'Project name and file path are required' });
  }

  try {
    const projectPath = await getActualProjectPath(project);

    // Validate git repository
    await validateGitRepository(projectPath);

    const {
      repositoryRootPath,
      repositoryRelativeFilePath,
    } = await resolveRepositoryFilePath(projectPath, file);

    // Check if file is untracked or deleted
    const { stdout: statusOutput } = await spawnAsync(
      'git',
      ['status', '--porcelain', '--', repositoryRelativeFilePath],
      { cwd: repositoryRootPath },
    );
    const isUntracked = statusOutput.startsWith('??');
    const isDeleted = statusOutput.trim().startsWith('D ') || statusOutput.trim().startsWith(' D');

    let diff;
    if (isUntracked) {
      // For untracked files, show the entire file content as additions
      const filePath = path.join(repositoryRootPath, repositoryRelativeFilePath);
      const stats = await fs.stat(filePath);

      if (stats.isDirectory()) {
        // For directories, show a simple message
        diff = `Directory: ${repositoryRelativeFilePath}\n(Cannot show diff for directories)`;
      } else {
        // Synthesize a unified-diff hunk marking every line as added
        const fileContent = await fs.readFile(filePath, 'utf-8');
        const lines = fileContent.split('\n');
        diff = `--- /dev/null\n+++ b/${repositoryRelativeFilePath}\n@@ -0,0 +1,${lines.length} @@\n` +
          lines.map(line => `+${line}`).join('\n');
      }
    } else if (isDeleted) {
      // For deleted files, show the entire file content from HEAD as deletions
      const { stdout: fileContent } = await spawnAsync(
        'git',
        ['show', `HEAD:${repositoryRelativeFilePath}`],
        { cwd: repositoryRootPath },
      );
      const lines = fileContent.split('\n');
      diff = `--- a/${repositoryRelativeFilePath}\n+++ /dev/null\n@@ -1,${lines.length} +0,0 @@\n` +
        lines.map(line => `-${line}`).join('\n');
    } else {
      // Get diff for tracked files
      // First check for unstaged changes (working tree vs index)
      const { stdout: unstagedDiff } = await spawnAsync(
        'git',
        ['diff', '--', repositoryRelativeFilePath],
        { cwd: repositoryRootPath },
      );

      if (unstagedDiff) {
        // Show unstaged changes if they exist
        diff = stripDiffHeaders(unstagedDiff);
      } else {
        // If no unstaged changes, check for staged changes (index vs HEAD)
        const { stdout: stagedDiff } = await spawnAsync(
          'git',
          ['diff', '--cached', '--', repositoryRelativeFilePath],
          { cwd: repositoryRootPath },
        );
        diff = stripDiffHeaders(stagedDiff) || '';
      }
    }

    res.json({ diff });
  } catch (error) {
    console.error('Git diff error:', error);
    res.json({ error: error.message });
  }
});
// Get file content with diff information for CodeEditor
//
// GET /file-with-diff — returns { currentContent, oldContent, isDeleted,
// isUntracked }. For deleted files both contents are the HEAD version so the
// editor can still render the removed text; for untracked files oldContent
// stays empty. Errors are returned as 200 + { error } (legacy contract).
router.get('/file-with-diff', async (req, res) => {
  const { project, file } = req.query;

  if (!project || !file) {
    return res.status(400).json({ error: 'Project name and file path are required' });
  }

  try {
    const projectPath = await getActualProjectPath(project);

    // Validate git repository
    await validateGitRepository(projectPath);

    const {
      repositoryRootPath,
      repositoryRelativeFilePath,
    } = await resolveRepositoryFilePath(projectPath, file);

    // Check file status
    const { stdout: statusOutput } = await spawnAsync(
      'git',
      ['status', '--porcelain', '--', repositoryRelativeFilePath],
      { cwd: repositoryRootPath },
    );
    const isUntracked = statusOutput.startsWith('??');
    const isDeleted = statusOutput.trim().startsWith('D ') || statusOutput.trim().startsWith(' D');

    let currentContent = '';
    let oldContent = '';

    if (isDeleted) {
      // For deleted files, get content from HEAD
      const { stdout: headContent } = await spawnAsync(
        'git',
        ['show', `HEAD:${repositoryRelativeFilePath}`],
        { cwd: repositoryRootPath },
      );
      oldContent = headContent;
      currentContent = headContent; // Show the deleted content in editor
    } else {
      // Get current file content
      const filePath = path.join(repositoryRootPath, repositoryRelativeFilePath);
      const stats = await fs.stat(filePath);

      if (stats.isDirectory()) {
        // Cannot show content for directories
        return res.status(400).json({ error: 'Cannot show diff for directories' });
      }

      currentContent = await fs.readFile(filePath, 'utf-8');

      if (!isUntracked) {
        // Get the old content from HEAD for tracked files
        try {
          const { stdout: headContent } = await spawnAsync(
            'git',
            ['show', `HEAD:${repositoryRelativeFilePath}`],
            { cwd: repositoryRootPath },
          );
          oldContent = headContent;
        } catch (error) {
          // File might be newly added to git (staged but not committed)
          oldContent = '';
        }
      }
    }

    res.json({
      currentContent,
      oldContent,
      isDeleted,
      isUntracked
    });
  } catch (error) {
    console.error('Git file-with-diff error:', error);
    res.json({ error: error.message });
  }
});
/**
 * POST /initial-commit — stage everything and create the repository's first
 * commit. Rejects with 400 when commits already exist or there is nothing to
 * commit; 500 for any other git failure.
 */
router.post('/initial-commit', async (req, res) => {
  const { project } = req.body;

  if (!project) {
    return res.status(400).json({ error: 'Project name is required' });
  }

  try {
    const projectPath = await getActualProjectPath(project);

    // Validate git repository
    await validateGitRepository(projectPath);

    // Refuse when HEAD already resolves: an initial commit only makes sense
    // in an empty repository.
    let hasHead = true;
    try {
      await spawnAsync('git', ['rev-parse', 'HEAD'], { cwd: projectPath });
    } catch (error) {
      hasHead = false;
    }
    if (hasHead) {
      return res.status(400).json({ error: 'Repository already has commits. Use regular commit instead.' });
    }

    // Stage every file, then commit.
    await spawnAsync('git', ['add', '.'], { cwd: projectPath });
    const commitResult = await spawnAsync('git', ['commit', '-m', 'Initial commit'], { cwd: projectPath });

    res.json({ success: true, output: commitResult.stdout, message: 'Initial commit created successfully' });
  } catch (error) {
    console.error('Git initial commit error:', error);

    // Handle the case where there's nothing to commit
    if (error.message.includes('nothing to commit')) {
      return res.status(400).json({
        error: 'Nothing to commit',
        details: 'No files found in the repository. Add some files first.'
      });
    }

    res.status(500).json({ error: error.message });
  }
});
// Commit changes
//
// POST /commit — stage the selected files (resolved to repo-relative paths)
// and create a commit with the supplied message.
router.post('/commit', async (req, res) => {
  const { project, message, files } = req.body;

  if (!project || !message || !files || files.length === 0) {
    return res.status(400).json({ error: 'Project name, commit message, and files are required' });
  }

  try {
    const projectPath = await getActualProjectPath(project);

    // Validate git repository
    await validateGitRepository(projectPath);
    const repositoryRootPath = await getRepositoryRootPath(projectPath);

    // Stage selected files (each resolved against the repository root; '--'
    // prevents a path from being parsed as a git option)
    for (const file of files) {
      const { repositoryRelativeFilePath } = await resolveRepositoryFilePath(projectPath, file);
      await spawnAsync('git', ['add', '--', repositoryRelativeFilePath], { cwd: repositoryRootPath });
    }

    // Commit with message
    const { stdout } = await spawnAsync('git', ['commit', '-m', message], { cwd: repositoryRootPath });

    res.json({ success: true, output: stdout });
  } catch (error) {
    console.error('Git commit error:', error);
    res.status(500).json({ error: error.message });
  }
});

// Revert latest local commit (keeps changes staged)
//
// POST /revert-local-commit — undoes the most recent commit while leaving its
// changes staged. Uses `reset --soft HEAD~1` normally; for the very first
// commit (which has no parent) it deletes the HEAD ref instead.
router.post('/revert-local-commit', async (req, res) => {
  const { project } = req.body;

  if (!project) {
    return res.status(400).json({ error: 'Project name is required' });
  }

  try {
    const projectPath = await getActualProjectPath(project);
    await validateGitRepository(projectPath);

    // Nothing to revert in a repository without commits.
    try {
      await spawnAsync('git', ['rev-parse', '--verify', 'HEAD'], { cwd: projectPath });
    } catch (error) {
      return res.status(400).json({
        error: 'No local commit to revert',
        details: 'This repository has no commit yet.',
      });
    }

    try {
      // Soft reset rewinds one commit while preserving all file changes in the index.
      await spawnAsync('git', ['reset', '--soft', 'HEAD~1'], { cwd: projectPath });
    } catch (error) {
      // Detect the "initial commit" case by git's own error text: HEAD~1 does
      // not resolve when the current commit has no parent.
      const errorDetails = `${error.stderr || ''} ${error.message || ''}`;
      const isInitialCommit = errorDetails.includes('HEAD~1') &&
        (errorDetails.includes('unknown revision') || errorDetails.includes('ambiguous argument'));

      if (!isInitialCommit) {
        throw error;
      }

      // Initial commit has no parent; deleting HEAD uncommits it and keeps files staged.
      await spawnAsync('git', ['update-ref', '-d', 'HEAD'], { cwd: projectPath });
    }

    res.json({
      success: true,
      output: 'Latest local commit reverted successfully. Changes were kept staged.',
    });
  } catch (error) {
    console.error('Git revert local commit error:', error);
    res.status(500).json({ error: error.message });
  }
});
/**
 * GET /branches — list local and remote branch names for a project.
 * Responds with { branches, localBranches, remoteBranches }; `branches` is the
 * deduplicated union kept for backward compatibility. Errors are returned as
 * 200 + { error } (legacy contract).
 */
router.get('/branches', async (req, res) => {
  const { project } = req.query;

  if (!project) {
    return res.status(400).json({ error: 'Project name is required' });
  }

  try {
    const projectPath = await getActualProjectPath(project);
    await validateGitRepository(projectPath);

    // `git branch -a` lists local branches plus remotes/<remote>/<name> entries.
    const { stdout } = await spawnAsync('git', ['branch', '-a'], { cwd: projectPath });

    // Drop blanks and symbolic-ref lines such as "remotes/origin/HEAD -> origin/main".
    const branchLines = stdout
      .split('\n')
      .map(line => line.trim())
      .filter(line => line && !line.includes('->'));

    // Local branches; the current one is prefixed with "* ".
    const localBranches = [];
    for (const line of branchLines) {
      if (line.startsWith('remotes/')) continue;
      localBranches.push(line.startsWith('* ') ? line.substring(2) : line);
    }

    // Remote branches, stripped of their "remotes/<remote>/" prefix, skipping
    // names that already exist as local branches.
    const remoteBranches = [];
    for (const line of branchLines) {
      if (!line.startsWith('remotes/')) continue;
      const name = line.replace(/^remotes\/[^/]+\//, '');
      if (!localBranches.includes(name)) {
        remoteBranches.push(name);
      }
    }

    // Backward-compat flat list (local + unique remotes, deduplicated).
    const branches = [...new Set([...localBranches, ...remoteBranches])];

    res.json({ branches, localBranches, remoteBranches });
  } catch (error) {
    console.error('Git branches error:', error);
    res.json({ error: error.message });
  }
});

/**
 * POST /checkout — switch the working tree to an existing branch.
 */
router.post('/checkout', async (req, res) => {
  const { project, branch } = req.body;

  if (!project || !branch) {
    return res.status(400).json({ error: 'Project name and branch are required' });
  }

  try {
    const projectPath = await getActualProjectPath(project);

    // Validate the branch name before handing it to git.
    validateBranchName(branch);
    const checkoutResult = await spawnAsync('git', ['checkout', branch], { cwd: projectPath });

    res.json({ success: true, output: checkoutResult.stdout });
  } catch (error) {
    console.error('Git checkout error:', error);
    res.status(500).json({ error: error.message });
  }
});
// Create new branch
//
// POST /create-branch — create a branch from the current HEAD and switch to it.
router.post('/create-branch', async (req, res) => {
  const { project, branch } = req.body;

  if (!project || !branch) {
    return res.status(400).json({ error: 'Project name and branch name are required' });
  }

  try {
    const projectPath = await getActualProjectPath(project);

    // Create and checkout new branch (name validated defensively first)
    validateBranchName(branch);
    const { stdout } = await spawnAsync('git', ['checkout', '-b', branch], { cwd: projectPath });

    res.json({ success: true, output: stdout });
  } catch (error) {
    console.error('Git create branch error:', error);
    res.status(500).json({ error: error.message });
  }
});

// Delete a local branch
//
// POST /delete-branch — delete a fully-merged local branch (`git branch -d`).
router.post('/delete-branch', async (req, res) => {
  const { project, branch } = req.body;

  if (!project || !branch) {
    return res.status(400).json({ error: 'Project name and branch name are required' });
  }

  try {
    const projectPath = await getActualProjectPath(project);
    await validateGitRepository(projectPath);

    // FIX: this was the only branch route that never validated the branch
    // name before passing it to git's argv. Validate like checkout/create do,
    // and additionally refuse leading-dash values outright so a crafted
    // "branch" such as "--force" can never be parsed as a git option.
    validateBranchName(branch);
    if (branch.startsWith('-')) {
      return res.status(400).json({ error: 'Invalid branch name' });
    }

    // Safety: cannot delete the currently checked-out branch
    const { stdout: currentBranch } = await spawnAsync('git', ['branch', '--show-current'], { cwd: projectPath });
    if (currentBranch.trim() === branch) {
      return res.status(400).json({ error: 'Cannot delete the currently checked-out branch' });
    }

    const { stdout } = await spawnAsync('git', ['branch', '-d', branch], { cwd: projectPath });
    res.json({ success: true, output: stdout });
  } catch (error) {
    console.error('Git delete branch error:', error);
    res.status(500).json({ error: error.message });
  }
});
// Return the full diff for a single commit (truncated for very large commits)
router.get('/commit-diff', async (req, res) => {
  const { project, commit } = req.query;

  if (!project || !commit) {
    return res.status(400).json({ error: 'Project name and commit hash are required' });
  }

  try {
    const projectPath = await getActualProjectPath(project);

    // Validate commit reference (defense-in-depth)
    validateCommitRef(commit);

    const { stdout } = await spawnAsync('git', ['show', commit], { cwd: projectPath });

    const isTruncated = stdout.length > COMMIT_DIFF_CHARACTER_LIMIT;
    let diff = stdout;
    if (isTruncated) {
      diff = `${stdout.slice(0, COMMIT_DIFF_CHARACTER_LIMIT)}\n\n... Diff truncated to keep the UI responsive ...`;
    }

    res.json({ diff, isTruncated });
  } catch (error) {
    console.error('Git commit diff error:', error);
    res.json({ error: error.message });
  }
});

// Generate a commit message for the selected files using an AI provider
router.post('/generate-commit-message', async (req, res) => {
  const { project, files, provider = 'claude' } = req.body;

  if (!project || !files || files.length === 0) {
    return res.status(400).json({ error: 'Project name and files are required' });
  }

  const supportedProviders = ['claude', 'cursor'];
  if (!supportedProviders.includes(provider)) {
    return res.status(400).json({ error: 'provider must be "claude" or "cursor"' });
  }

  try {
    const projectPath = await getActualProjectPath(project);
    await validateGitRepository(projectPath);
    const repositoryRootPath = await getRepositoryRootPath(projectPath);

    // Collect the diff against HEAD for every selected file, sequentially so
    // the context is assembled in a stable order
    let diffContext = '';
    for (const file of files) {
      try {
        const { repositoryRelativeFilePath } = await resolveRepositoryFilePath(projectPath, file);
        const { stdout } = await spawnAsync(
          'git', ['diff', 'HEAD', '--', repositoryRelativeFilePath],
          { cwd: repositoryRootPath }
        );
        if (stdout) {
          diffContext += `\n--- ${repositoryRelativeFilePath} ---\n${stdout}`;
        }
      } catch (error) {
        console.error(`Error getting diff for ${file}:`, error);
      }
    }

    // An empty diff usually means the files are untracked — fall back to
    // including (a capped slice of) their raw content instead
    if (!diffContext.trim()) {
      for (const file of files) {
        try {
          const { repositoryRelativeFilePath } = await resolveRepositoryFilePath(projectPath, file);
          const filePath = path.join(repositoryRootPath, repositoryRelativeFilePath);
          const stats = await fs.stat(filePath);

          if (stats.isDirectory()) {
            diffContext += `\n--- ${repositoryRelativeFilePath} (new directory) ---\n`;
          } else {
            const content = await fs.readFile(filePath, 'utf-8');
            diffContext += `\n--- ${repositoryRelativeFilePath} (new file) ---\n${content.substring(0, 1000)}\n`;
          }
        } catch (error) {
          console.error(`Error reading file ${file}:`, error);
        }
      }
    }

    const message = await generateCommitMessageWithAI(files, diffContext, provider, projectPath);

    res.json({ message });
  } catch (error) {
    console.error('Generate commit message error:', error);
    res.status(500).json({ error: error.message });
  }
});
/**
 * Generates a commit message with an AI agent (Claude SDK or Cursor CLI).
 *
 * The agent streams its output through a minimal in-memory writer; the
 * collected text is normalized by cleanCommitMessage(). Any failure falls
 * back to a generic `chore:` message so the caller never hard-fails.
 * (Leftover emoji debug logging removed; only genuine errors are logged.)
 *
 * @param {Array} files - List of changed files
 * @param {string} diffContext - Git diff content
 * @param {string} provider - 'claude' or 'cursor'
 * @param {string} projectPath - Project directory path
 * @returns {Promise<string>} Generated commit message
 */
async function generateCommitMessageWithAI(files, diffContext, provider, projectPath) {
  // Prompt sent verbatim to the agent; diff context is capped to keep it small
  const prompt = `Generate a conventional commit message for these changes.

REQUIREMENTS:
- Format: type(scope): subject
- Include body explaining what changed and why
- Types: feat, fix, docs, style, refactor, perf, test, build, ci, chore
- Subject under 50 chars, body wrapped at 72 chars
- Focus on user-facing changes, not implementation details
- Consider what's being added AND removed
- Return ONLY the commit message (no markdown, explanations, or code blocks)

FILES CHANGED:
${files.map(f => `- ${f}`).join('\n')}

DIFFS:
${diffContext.substring(0, 4000)}

Generate the commit message:`;

  try {
    // Collects streamed agent output; the two providers emit different
    // message envelopes, so all known shapes are handled below.
    let responseText = '';
    const writer = {
      send: (data) => {
        try {
          const parsed = typeof data === 'string' ? JSON.parse(data) : data;

          if (parsed.type === 'claude-response' && parsed.data) {
            // Claude SDK: {type: 'claude-response', data: {message: {content: [...]}}}
            const message = parsed.data.message || parsed.data;
            if (Array.isArray(message.content)) {
              for (const item of message.content) {
                if (item.type === 'text' && item.text) {
                  responseText += item.text;
                }
              }
            }
          } else if (parsed.type === 'cursor-output' && parsed.output) {
            // Cursor CLI: {type: 'cursor-output', output: '...'}
            responseText += parsed.output;
          } else if (parsed.type === 'text' && parsed.text) {
            // Plain direct text messages
            responseText += parsed.text;
          }
        } catch (e) {
          // Malformed frames are logged but never abort the stream
          console.error('Error parsing writer data:', e);
        }
      },
      setSessionId: () => {}, // No-op for this use case
    };

    if (provider === 'claude') {
      await queryClaudeSDK(prompt, {
        cwd: projectPath,
        permissionMode: 'bypassPermissions',
        model: 'sonnet'
      }, writer);
    } else if (provider === 'cursor') {
      await spawnCursor(prompt, {
        cwd: projectPath,
        skipPermissions: true
      }, writer);
    }

    return cleanCommitMessage(responseText) || 'chore: update files';
  } catch (error) {
    console.error('Error generating commit message with AI:', error);
    // Fallback: generic but valid conventional-commit message
    return `chore: update ${files.length} file${files.length !== 1 ? 's' : ''}`;
  }
}

/**
 * Normalizes a raw AI response into a bare conventional-commit message:
 * strips code fences, markdown headers and surrounding quotes, collapses
 * runs of blank lines, and drops any explanatory preamble before the first
 * conventional-commit line.
 *
 * @param {string} text - Raw AI response
 * @returns {string} Clean commit message ('' for empty/blank input)
 */
function cleanCommitMessage(text) {
  if (!text || !text.trim()) {
    return '';
  }

  let cleaned = text.trim();

  // Remove markdown code fences
  cleaned = cleaned.replace(/```[a-z]*\n/g, '');
  cleaned = cleaned.replace(/```/g, '');

  // Remove markdown headers
  cleaned = cleaned.replace(/^#+\s*/gm, '');

  // Remove leading/trailing quotes
  cleaned = cleaned.replace(/^["']|["']$/g, '');

  // Keep subject + body, but collapse runs of blank lines
  cleaned = cleaned.replace(/\n{3,}/g, '\n\n');

  // Drop any explanatory text before the first conventional-commit line
  const conventionalCommitMatch = cleaned.match(/(feat|fix|docs|style|refactor|perf|test|build|ci|chore)(\(.+?\))?:.+/s);
  if (conventionalCommitMatch) {
    cleaned = cleaned.substring(cleaned.indexOf(conventionalCommitMatch[0]));
  }

  return cleaned.trim();
}
// Report ahead/behind status versus the upstream tracking branch
router.get('/remote-status', async (req, res) => {
  const { project } = req.query;

  if (!project) {
    return res.status(400).json({ error: 'Project name is required' });
  }

  try {
    const projectPath = await getActualProjectPath(project);
    await validateGitRepository(projectPath);

    const branch = await getCurrentBranchName(projectPath);
    const hasCommits = await repositoryHasCommits(projectPath);

    const { stdout: remoteOutput } = await spawnAsync('git', ['remote'], { cwd: projectPath });
    const remotes = remoteOutput.trim().split('\n').filter(r => r.trim());
    const hasRemote = remotes.length > 0;
    let fallbackRemoteName = null;
    if (hasRemote) {
      fallbackRemoteName = remotes.includes('origin') ? 'origin' : remotes[0];
    }

    // Repositories initialized with `git init` can have a branch but no commits.
    // Return a non-error state so the UI can show the initial-commit workflow.
    if (!hasCommits) {
      return res.json({
        hasRemote,
        hasUpstream: false,
        branch,
        remoteName: fallbackRemoteName,
        ahead: 0,
        behind: 0,
        isUpToDate: false,
        message: 'Repository has no commits yet'
      });
    }

    // Resolve the upstream tracking branch (e.g. 'origin/main')
    let trackingBranch;
    let remoteName;
    try {
      const { stdout } = await spawnAsync('git', ['rev-parse', '--abbrev-ref', `${branch}@{upstream}`], { cwd: projectPath });
      trackingBranch = stdout.trim();
      remoteName = trackingBranch.split('/')[0];
    } catch (error) {
      return res.json({
        hasRemote,
        hasUpstream: false,
        branch,
        remoteName: fallbackRemoteName,
        message: 'No remote tracking branch configured'
      });
    }

    // rev-list emits '<behind>\t<ahead>' relative to the tracking branch
    const { stdout: countOutput } = await spawnAsync(
      'git', ['rev-list', '--count', '--left-right', `${trackingBranch}...HEAD`],
      { cwd: projectPath }
    );
    const [behind, ahead] = countOutput.trim().split('\t').map(Number);

    res.json({
      hasRemote: true,
      hasUpstream: true,
      branch,
      remoteBranch: trackingBranch,
      remoteName,
      ahead: ahead || 0,
      behind: behind || 0,
      isUpToDate: ahead === 0 && behind === 0
    });
  } catch (error) {
    console.error('Git remote status error:', error);
    res.json({ error: error.message });
  }
});

// Fetch from the current branch's upstream remote (falls back to origin)
router.post('/fetch', async (req, res) => {
  const { project } = req.body;

  if (!project) {
    return res.status(400).json({ error: 'Project name is required' });
  }

  try {
    const projectPath = await getActualProjectPath(project);
    await validateGitRepository(projectPath);

    const branch = await getCurrentBranchName(projectPath);

    let remoteName = 'origin'; // fallback
    try {
      const { stdout } = await spawnAsync('git', ['rev-parse', '--abbrev-ref', `${branch}@{upstream}`], { cwd: projectPath });
      remoteName = stdout.trim().split('/')[0];
    } catch (error) {
      // No upstream configured — fetch from origin anyway
      console.log('No upstream configured, using origin as fallback');
    }

    validateRemoteName(remoteName);
    const { stdout } = await spawnAsync('git', ['fetch', remoteName], { cwd: projectPath });

    res.json({ success: true, output: stdout || 'Fetch completed successfully', remoteName });
  } catch (error) {
    console.error('Git fetch error:', error);

    // Map common git failures to actionable messages
    let details = error.message;
    if (error.message.includes('Could not resolve hostname')) {
      details = 'Unable to connect to remote repository. Check your internet connection.';
    } else if (error.message.includes('fatal: \'origin\' does not appear to be a git repository')) {
      details = 'No remote repository configured. Add a remote with: git remote add origin <url>';
    }

    res.status(500).json({ error: 'Fetch failed', details });
  }
});
/**
 * Resolves the upstream tracking ref for `branch`.
 * Returns { remoteName, remoteBranch }; falls back to origin/<branch> when no
 * upstream is configured. Extracted because the pull and push handlers
 * previously duplicated this logic verbatim.
 */
async function resolveUpstreamTracking(projectPath, branch) {
  try {
    const { stdout } = await spawnAsync(
      'git', ['rev-parse', '--abbrev-ref', `${branch}@{upstream}`],
      { cwd: projectPath }
    );
    const tracking = stdout.trim();
    return {
      remoteName: tracking.split('/')[0],
      remoteBranch: tracking.split('/').slice(1).join('/'),
    };
  } catch (error) {
    // No upstream configured — fall back to origin/<current branch>
    console.log('No upstream configured, using origin/branch as fallback');
    return { remoteName: 'origin', remoteBranch: branch };
  }
}

// Pull from remote (fetch + merge using smart remote detection)
router.post('/pull', async (req, res) => {
  const { project } = req.body;

  if (!project) {
    return res.status(400).json({ error: 'Project name is required' });
  }

  try {
    const projectPath = await getActualProjectPath(project);
    await validateGitRepository(projectPath);

    const branch = await getCurrentBranchName(projectPath);
    const { remoteName, remoteBranch } = await resolveUpstreamTracking(projectPath, branch);

    // Defense-in-depth before handing values to git
    validateRemoteName(remoteName);
    validateBranchName(remoteBranch);
    const { stdout } = await spawnAsync('git', ['pull', remoteName, remoteBranch], { cwd: projectPath });

    res.json({
      success: true,
      output: stdout || 'Pull completed successfully',
      remoteName,
      remoteBranch
    });
  } catch (error) {
    console.error('Git pull error:', error);

    // Map common pull failures to actionable messages
    let errorMessage = 'Pull failed';
    let details = error.message;

    if (error.message.includes('CONFLICT')) {
      errorMessage = 'Merge conflicts detected';
      details = 'Pull created merge conflicts. Please resolve conflicts manually in the editor, then commit the changes.';
    } else if (error.message.includes('Please commit your changes or stash them')) {
      errorMessage = 'Uncommitted changes detected';
      details = 'Please commit or stash your local changes before pulling.';
    } else if (error.message.includes('Could not resolve hostname')) {
      errorMessage = 'Network error';
      details = 'Unable to connect to remote repository. Check your internet connection.';
    } else if (error.message.includes('fatal: \'origin\' does not appear to be a git repository')) {
      errorMessage = 'Remote not configured';
      details = 'No remote repository configured. Add a remote with: git remote add origin <url>';
    } else if (error.message.includes('diverged')) {
      errorMessage = 'Branches have diverged';
      details = 'Your local branch and remote branch have diverged. Consider fetching first to review changes.';
    }

    res.status(500).json({ error: errorMessage, details });
  }
});

// Push commits to remote repository
router.post('/push', async (req, res) => {
  const { project } = req.body;

  if (!project) {
    return res.status(400).json({ error: 'Project name is required' });
  }

  try {
    const projectPath = await getActualProjectPath(project);
    await validateGitRepository(projectPath);

    const branch = await getCurrentBranchName(projectPath);
    const { remoteName, remoteBranch } = await resolveUpstreamTracking(projectPath, branch);

    validateRemoteName(remoteName);
    validateBranchName(remoteBranch);
    const { stdout } = await spawnAsync('git', ['push', remoteName, remoteBranch], { cwd: projectPath });

    res.json({
      success: true,
      output: stdout || 'Push completed successfully',
      remoteName,
      remoteBranch
    });
  } catch (error) {
    console.error('Git push error:', error);

    // Map common push failures to actionable messages
    let errorMessage = 'Push failed';
    let details = error.message;

    if (error.message.includes('rejected')) {
      errorMessage = 'Push rejected';
      details = 'The remote has newer commits. Pull first to merge changes before pushing.';
    } else if (error.message.includes('non-fast-forward')) {
      errorMessage = 'Non-fast-forward push';
      details = 'Your branch is behind the remote. Pull the latest changes first.';
    } else if (error.message.includes('Could not resolve hostname')) {
      errorMessage = 'Network error';
      details = 'Unable to connect to remote repository. Check your internet connection.';
    } else if (error.message.includes('fatal: \'origin\' does not appear to be a git repository')) {
      errorMessage = 'Remote not configured';
      details = 'No remote repository configured. Add a remote with: git remote add origin <url>';
    } else if (error.message.includes('Permission denied')) {
      errorMessage = 'Authentication failed';
      details = 'Permission denied. Check your credentials or SSH keys.';
    } else if (error.message.includes('no upstream branch')) {
      errorMessage = 'No upstream branch';
      details = 'Use: git push --set-upstream origin <branch>';
    }

    res.status(500).json({ error: errorMessage, details });
  }
});
// Publish branch to remote (set upstream and push)
router.post('/publish', async (req, res) => {
  const { project, branch } = req.body;

  if (!project || !branch) {
    return res.status(400).json({ error: 'Project name and branch are required' });
  }

  try {
    const projectPath = await getActualProjectPath(project);
    await validateGitRepository(projectPath);

    validateBranchName(branch);

    // Only the currently checked-out branch may be published
    const currentBranchName = await getCurrentBranchName(projectPath);
    if (currentBranchName !== branch) {
      return res.status(400).json({
        error: `Branch mismatch. Current branch is ${currentBranchName}, but trying to publish ${branch}`
      });
    }

    // Pick a remote: prefer 'origin', otherwise the first configured remote
    let remoteName = 'origin';
    try {
      const { stdout } = await spawnAsync('git', ['remote'], { cwd: projectPath });
      const remotes = stdout.trim().split('\n').filter(r => r.trim());
      if (remotes.length === 0) {
        return res.status(400).json({
          error: 'No remote repository configured. Add a remote with: git remote add origin <url>'
        });
      }
      remoteName = remotes.includes('origin') ? 'origin' : remotes[0];
    } catch (error) {
      return res.status(400).json({
        error: 'No remote repository configured. Add a remote with: git remote add origin <url>'
      });
    }

    validateRemoteName(remoteName);
    const { stdout } = await spawnAsync('git', ['push', '--set-upstream', remoteName, branch], { cwd: projectPath });

    res.json({
      success: true,
      output: stdout || 'Branch published successfully',
      remoteName,
      branch
    });
  } catch (error) {
    console.error('Git publish error:', error);

    // Map common publish failures to actionable messages
    let errorMessage = 'Publish failed';
    let details = error.message;

    if (error.message.includes('rejected')) {
      errorMessage = 'Publish rejected';
      details = 'The remote branch already exists and has different commits. Use push instead.';
    } else if (error.message.includes('Could not resolve hostname')) {
      errorMessage = 'Network error';
      details = 'Unable to connect to remote repository. Check your internet connection.';
    } else if (error.message.includes('Permission denied')) {
      errorMessage = 'Authentication failed';
      details = 'Permission denied. Check your credentials or SSH keys.';
    } else if (error.message.includes('fatal:') && error.message.includes('does not appear to be a git repository')) {
      errorMessage = 'Remote not configured';
      details = 'Remote repository not properly configured. Check your remote URL.';
    }

    res.status(500).json({ error: errorMessage, details });
  }
});

// Discard working-tree changes for a single file
router.post('/discard', async (req, res) => {
  const { project, file } = req.body;

  if (!project || !file) {
    return res.status(400).json({ error: 'Project name and file path are required' });
  }

  try {
    const projectPath = await getActualProjectPath(project);
    await validateGitRepository(projectPath);
    const {
      repositoryRootPath,
      repositoryRelativeFilePath,
    } = await resolveRepositoryFilePath(projectPath, file);

    // Porcelain status decides which discard strategy applies
    const { stdout: statusOutput } = await spawnAsync(
      'git',
      ['status', '--porcelain', '--', repositoryRelativeFilePath],
      { cwd: repositoryRootPath },
    );

    if (!statusOutput.trim()) {
      return res.status(400).json({ error: 'No changes to discard for this file' });
    }

    const status = statusOutput.substring(0, 2);
    const absolutePath = path.join(repositoryRootPath, repositoryRelativeFilePath);

    if (status === '??') {
      // Untracked file or directory — delete it outright
      const stats = await fs.stat(absolutePath);
      if (stats.isDirectory()) {
        await fs.rm(absolutePath, { recursive: true, force: true });
      } else {
        await fs.unlink(absolutePath);
      }
    } else if (status.includes('M') || status.includes('D')) {
      // Modified or deleted file — restore from HEAD
      await spawnAsync('git', ['restore', '--', repositoryRelativeFilePath], { cwd: repositoryRootPath });
    } else if (status.includes('A')) {
      // Added file — unstage it
      await spawnAsync('git', ['reset', 'HEAD', '--', repositoryRelativeFilePath], { cwd: repositoryRootPath });
    }

    res.json({ success: true, message: `Changes discarded for ${repositoryRelativeFilePath}` });
  } catch (error) {
    console.error('Git discard error:', error);
    res.status(500).json({ error: error.message });
  }
});

// Delete an untracked file or directory
router.post('/delete-untracked', async (req, res) => {
  const { project, file } = req.body;

  if (!project || !file) {
    return res.status(400).json({ error: 'Project name and file path are required' });
  }

  try {
    const projectPath = await getActualProjectPath(project);
    await validateGitRepository(projectPath);
    const {
      repositoryRootPath,
      repositoryRelativeFilePath,
    } = await resolveRepositoryFilePath(projectPath, file);

    const { stdout: statusOutput } = await spawnAsync(
      'git',
      ['status', '--porcelain', '--', repositoryRelativeFilePath],
      { cwd: repositoryRootPath },
    );

    if (!statusOutput.trim()) {
      return res.status(400).json({ error: 'File is not untracked or does not exist' });
    }

    // Only '??' (untracked) entries may be deleted through this endpoint
    if (statusOutput.substring(0, 2) !== '??') {
      return res.status(400).json({ error: 'File is not untracked. Use discard for tracked files.' });
    }

    const absolutePath = path.join(repositoryRootPath, repositoryRelativeFilePath);
    const stats = await fs.stat(absolutePath);

    if (stats.isDirectory()) {
      await fs.rm(absolutePath, { recursive: true, force: true });
      res.json({ success: true, message: `Untracked directory ${repositoryRelativeFilePath} deleted successfully` });
    } else {
      await fs.unlink(absolutePath);
      res.json({ success: true, message: `Untracked file ${repositoryRelativeFilePath} deleted successfully` });
    }
  } catch (error) {
    console.error('Git delete untracked error:', error);
    res.status(500).json({ error: error.message });
  }
});

export default router;
/**
 * MCP UTILITIES API ROUTES
 * ========================
 *
 * Thin HTTP layer over the centralized MCP detection helpers in
 * utils/mcp-detector.js.
 */

import express from 'express';
import { detectTaskMasterMCPServer, getAllMCPServers } from '../../../utils/mcp-detector.js';

const router = express.Router();

/**
 * GET /api/mcp-utils/taskmaster-server
 * Reports whether a TaskMaster MCP server is configured.
 */
router.get('/taskmaster-server', async (req, res) => {
  try {
    res.json(await detectTaskMasterMCPServer());
  } catch (error) {
    console.error('TaskMaster MCP detection error:', error);
    res.status(500).json({
      error: 'Failed to detect TaskMaster MCP server',
      message: error.message
    });
  }
});

/**
 * GET /api/mcp-utils/all-servers
 * Lists every configured MCP server.
 */
router.get('/all-servers', async (req, res) => {
  try {
    res.json(await getAllMCPServers());
  } catch (error) {
    console.error('MCP servers detection error:', error);
    res.status(500).json({
      error: 'Failed to get MCP servers',
      message: error.message
    });
  }
});

export default router;
/**
 * Spawns `claude <cliArgs...>`, buffers stdout/stderr, and answers the HTTP
 * request when the process settles. Replaces the per-route copies that
 * shadowed the Node `process` global and carried a dead (and incorrect)
 * `promisify(spawn)` — spawn is not a callback-style API. Uses the `spawn`
 * imported at the top of this module instead of re-importing it dynamically.
 *
 * @param {object} res - Express response
 * @param {string[]} cliArgs - arguments passed to the `claude` binary
 * @param {object} opts
 * @param {string} [opts.cwd] - working directory (local-scope commands)
 * @param {number} [opts.failureStatus=400] - HTTP status for a non-zero exit
 * @param {(stdout: string) => void} opts.onSuccess - success responder
 */
function runClaudeCli(res, cliArgs, { cwd, failureStatus = 400, onSuccess }) {
  // Named `child`, not `process`, so the Node global stays visible
  const child = spawn('claude', cliArgs, {
    stdio: ['pipe', 'pipe', 'pipe'],
    ...(cwd ? { cwd } : {}),
  });

  let stdout = '';
  let stderr = '';

  child.stdout.on('data', (data) => { stdout += data.toString(); });
  child.stderr.on('data', (data) => { stderr += data.toString(); });

  child.on('close', (code) => {
    if (code === 0) {
      onSuccess(stdout);
    } else {
      console.error('Claude CLI error:', stderr);
      res.status(failureStatus).json({ error: 'Claude CLI command failed', details: stderr });
    }
  });

  child.on('error', (error) => {
    console.error('Error running Claude CLI:', error);
    res.status(500).json({ error: 'Failed to run Claude CLI', details: error.message });
  });
}

// GET /api/mcp/cli/list - List MCP servers using Claude CLI
router.get('/cli/list', async (req, res) => {
  try {
    console.log('📋 Listing MCP servers using Claude CLI');

    runClaudeCli(res, ['mcp', 'list'], {
      failureStatus: 500,
      onSuccess: (stdout) => {
        res.json({ success: true, output: stdout, servers: parseClaudeListOutput(stdout) });
      },
    });
  } catch (error) {
    console.error('Error listing MCP servers via CLI:', error);
    res.status(500).json({ error: 'Failed to list MCP servers', details: error.message });
  }
});

// POST /api/mcp/cli/add - Add MCP server using Claude CLI
router.post('/cli/add', async (req, res) => {
  try {
    const { name, type = 'stdio', command, args = [], url, headers = {}, env = {}, scope = 'user', projectPath } = req.body;

    console.log(`➕ Adding MCP server using Claude CLI (${scope} scope):`, name);

    const cliArgs = ['mcp', 'add', '--scope', scope];

    if (type === 'http' || type === 'sse') {
      // claude mcp add --scope <scope> --transport <type> <name> <url> [--header "k: v"]...
      // (http and sse branches were identical apart from the transport value)
      cliArgs.push('--transport', type, name, url);
      Object.entries(headers).forEach(([key, value]) => {
        cliArgs.push('--header', `${key}: ${value}`);
      });
    } else {
      // stdio (default): claude mcp add --scope <scope> <name> [-e k=v]... <command> [args...]
      cliArgs.push(name);
      Object.entries(env).forEach(([key, value]) => {
        cliArgs.push('-e', `${key}=${value}`);
      });
      cliArgs.push(command);
      if (args && args.length > 0) {
        cliArgs.push(...args);
      }
    }

    console.log('🔧 Running Claude CLI command:', 'claude', cliArgs.join(' '));

    // Local-scope servers must be registered from inside the project directory
    const cwd = scope === 'local' && projectPath ? projectPath : undefined;
    if (cwd) {
      console.log('📁 Running in project directory:', cwd);
    }

    runClaudeCli(res, cliArgs, {
      cwd,
      failureStatus: 400,
      onSuccess: (stdout) => {
        res.json({ success: true, output: stdout, message: `MCP server "${name}" added successfully` });
      },
    });
  } catch (error) {
    console.error('Error adding MCP server via CLI:', error);
    res.status(500).json({ error: 'Failed to add MCP server', details: error.message });
  }
});

// POST /api/mcp/cli/add-json - Add MCP server using JSON format
router.post('/cli/add-json', async (req, res) => {
  try {
    const { name, jsonConfig, scope = 'user', projectPath } = req.body;

    console.log('➕ Adding MCP server using JSON format:', name);

    // Accept either a JSON string or an already-parsed object
    let parsedConfig;
    try {
      parsedConfig = typeof jsonConfig === 'string' ? JSON.parse(jsonConfig) : jsonConfig;
    } catch (parseError) {
      return res.status(400).json({
        error: 'Invalid JSON configuration',
        details: parseError.message
      });
    }

    // Validate required fields before shelling out
    if (!parsedConfig.type) {
      return res.status(400).json({
        error: 'Invalid configuration',
        details: 'Missing required field: type'
      });
    }
    if (parsedConfig.type === 'stdio' && !parsedConfig.command) {
      return res.status(400).json({
        error: 'Invalid configuration',
        details: 'stdio type requires a command field'
      });
    }
    if ((parsedConfig.type === 'http' || parsedConfig.type === 'sse') && !parsedConfig.url) {
      return res.status(400).json({
        error: 'Invalid configuration',
        details: `${parsedConfig.type} type requires a url field`
      });
    }

    // claude mcp add-json --scope <scope> <name> '<json>'
    const cliArgs = ['mcp', 'add-json', '--scope', scope, name, JSON.stringify(parsedConfig)];

    console.log('🔧 Running Claude CLI command:', 'claude', cliArgs.join(' '));

    const cwd = scope === 'local' && projectPath ? projectPath : undefined;
    if (cwd) {
      console.log('📁 Running in project directory:', cwd);
    }

    runClaudeCli(res, cliArgs, {
      cwd,
      failureStatus: 400,
      onSuccess: (stdout) => {
        res.json({ success: true, output: stdout, message: `MCP server "${name}" added successfully via JSON` });
      },
    });
  } catch (error) {
    console.error('Error adding MCP server via JSON:', error);
    res.status(500).json({ error: 'Failed to add MCP server', details: error.message });
  }
});
error: 'Claude CLI command failed', details: stderr }); + } + }); + + process.on('error', (error) => { + console.error('Error running Claude CLI:', error); + res.status(500).json({ error: 'Failed to run Claude CLI', details: error.message }); + }); + } catch (error) { + console.error('Error adding MCP server via JSON:', error); + res.status(500).json({ error: 'Failed to add MCP server', details: error.message }); + } +}); + +// DELETE /api/mcp/cli/remove/:name - Remove MCP server using Claude CLI +router.delete('/cli/remove/:name', async (req, res) => { + try { + const { name } = req.params; + const { scope } = req.query; // Get scope from query params + + // Handle the ID format (remove scope prefix if present) + let actualName = name; + let actualScope = scope; + + // If the name includes a scope prefix like "local:test", extract it + if (name.includes(':')) { + const [prefix, serverName] = name.split(':'); + actualName = serverName; + actualScope = actualScope || prefix; // Use prefix as scope if not provided in query + } + + console.log('🗑️ Removing MCP server using Claude CLI:', actualName, 'scope:', actualScope); + + const { spawn } = await import('child_process'); + + // Build command args based on scope + let cliArgs = ['mcp', 'remove']; + + // Add scope flag if it's local scope + if (actualScope === 'local') { + cliArgs.push('--scope', 'local'); + } else if (actualScope === 'user' || !actualScope) { + // User scope is default, but we can be explicit + cliArgs.push('--scope', 'user'); + } + + cliArgs.push(actualName); + + console.log('🔧 Running Claude CLI command:', 'claude', cliArgs.join(' ')); + + const process = spawn('claude', cliArgs, { + stdio: ['pipe', 'pipe', 'pipe'] + }); + + let stdout = ''; + let stderr = ''; + + process.stdout.on('data', (data) => { + stdout += data.toString(); + }); + + process.stderr.on('data', (data) => { + stderr += data.toString(); + }); + + process.on('close', (code) => { + if (code === 0) { + res.json({ success: true, 
output: stdout, message: `MCP server "${name}" removed successfully` }); + } else { + console.error('Claude CLI error:', stderr); + res.status(400).json({ error: 'Claude CLI command failed', details: stderr }); + } + }); + + process.on('error', (error) => { + console.error('Error running Claude CLI:', error); + res.status(500).json({ error: 'Failed to run Claude CLI', details: error.message }); + }); + } catch (error) { + console.error('Error removing MCP server via CLI:', error); + res.status(500).json({ error: 'Failed to remove MCP server', details: error.message }); + } +}); + +// GET /api/mcp/cli/get/:name - Get MCP server details using Claude CLI +router.get('/cli/get/:name', async (req, res) => { + try { + const { name } = req.params; + + console.log('📄 Getting MCP server details using Claude CLI:', name); + + const { spawn } = await import('child_process'); + + const process = spawn('claude', ['mcp', 'get', name], { + stdio: ['pipe', 'pipe', 'pipe'] + }); + + let stdout = ''; + let stderr = ''; + + process.stdout.on('data', (data) => { + stdout += data.toString(); + }); + + process.stderr.on('data', (data) => { + stderr += data.toString(); + }); + + process.on('close', (code) => { + if (code === 0) { + res.json({ success: true, output: stdout, server: parseClaudeGetOutput(stdout) }); + } else { + console.error('Claude CLI error:', stderr); + res.status(404).json({ error: 'Claude CLI command failed', details: stderr }); + } + }); + + process.on('error', (error) => { + console.error('Error running Claude CLI:', error); + res.status(500).json({ error: 'Failed to run Claude CLI', details: error.message }); + }); + } catch (error) { + console.error('Error getting MCP server details via CLI:', error); + res.status(500).json({ error: 'Failed to get MCP server details', details: error.message }); + } +}); + +// GET /api/mcp/config/read - Read MCP servers directly from Claude config files +router.get('/config/read', async (req, res) => { + try { + console.log('📖 
Reading MCP servers from Claude config files'); + + const homeDir = os.homedir(); + const configPaths = [ + path.join(homeDir, '.claude.json'), + path.join(homeDir, '.claude', 'settings.json') + ]; + + let configData = null; + let configPath = null; + + // Try to read from either config file + for (const filepath of configPaths) { + try { + const fileContent = await fs.readFile(filepath, 'utf8'); + configData = JSON.parse(fileContent); + configPath = filepath; + console.log(`✅ Found Claude config at: ${filepath}`); + break; + } catch (error) { + // File doesn't exist or is not valid JSON, try next + console.log(`ℹ️ Config not found or invalid at: ${filepath}`); + } + } + + if (!configData) { + return res.json({ + success: false, + message: 'No Claude configuration file found', + servers: [] + }); + } + + // Extract MCP servers from the config + const servers = []; + + // Check for user-scoped MCP servers (at root level) + if (configData.mcpServers && typeof configData.mcpServers === 'object' && Object.keys(configData.mcpServers).length > 0) { + console.log('🔍 Found user-scoped MCP servers:', Object.keys(configData.mcpServers)); + for (const [name, config] of Object.entries(configData.mcpServers)) { + const server = { + id: name, + name: name, + type: 'stdio', // Default type + scope: 'user', // User scope - available across all projects + config: {}, + raw: config // Include raw config for full details + }; + + // Determine transport type and extract config + if (config.command) { + server.type = 'stdio'; + server.config.command = config.command; + server.config.args = config.args || []; + server.config.env = config.env || {}; + } else if (config.url) { + server.type = config.transport || 'http'; + server.config.url = config.url; + server.config.headers = config.headers || {}; + } + + servers.push(server); + } + } + + // Check for local-scoped MCP servers (project-specific) + const currentProjectPath = process.cwd(); + + // Check under 'projects' key + if 
(configData.projects && configData.projects[currentProjectPath]) { + const projectConfig = configData.projects[currentProjectPath]; + if (projectConfig.mcpServers && typeof projectConfig.mcpServers === 'object' && Object.keys(projectConfig.mcpServers).length > 0) { + console.log(`🔍 Found local-scoped MCP servers for ${currentProjectPath}:`, Object.keys(projectConfig.mcpServers)); + for (const [name, config] of Object.entries(projectConfig.mcpServers)) { + const server = { + id: `local:${name}`, // Prefix with scope for uniqueness + name: name, // Keep original name + type: 'stdio', // Default type + scope: 'local', // Local scope - only for this project + projectPath: currentProjectPath, + config: {}, + raw: config // Include raw config for full details + }; + + // Determine transport type and extract config + if (config.command) { + server.type = 'stdio'; + server.config.command = config.command; + server.config.args = config.args || []; + server.config.env = config.env || {}; + } else if (config.url) { + server.type = config.transport || 'http'; + server.config.url = config.url; + server.config.headers = config.headers || {}; + } + + servers.push(server); + } + } + } + + console.log(`📋 Found ${servers.length} MCP servers in config`); + + res.json({ + success: true, + configPath: configPath, + servers: servers + }); + } catch (error) { + console.error('Error reading Claude config:', error); + res.status(500).json({ + error: 'Failed to read Claude configuration', + details: error.message + }); + } +}); + +// Helper functions to parse Claude CLI output +function parseClaudeListOutput(output) { + const servers = []; + const lines = output.split('\n').filter(line => line.trim()); + + for (const line of lines) { + // Skip the header line + if (line.includes('Checking MCP server health')) continue; + + // Parse lines like "test: test test - ✗ Failed to connect" + // or "server-name: command or description - ✓ Connected" + if (line.includes(':')) { + const colonIndex = 
line.indexOf(':'); + const name = line.substring(0, colonIndex).trim(); + + // Skip empty names + if (!name) continue; + + // Extract the rest after the name + const rest = line.substring(colonIndex + 1).trim(); + + // Try to extract description and status + let description = rest; + let status = 'unknown'; + let type = 'stdio'; // default type + + // Check for status indicators + if (rest.includes('✓') || rest.includes('✗')) { + const statusMatch = rest.match(/(.*?)\s*-\s*([✓✗].*)$/); + if (statusMatch) { + description = statusMatch[1].trim(); + status = statusMatch[2].includes('✓') ? 'connected' : 'failed'; + } + } + + // Try to determine type from description + if (description.startsWith('http://') || description.startsWith('https://')) { + type = 'http'; + } + + servers.push({ + name, + type, + status: status || 'active', + description + }); + } + } + + console.log('🔍 Parsed Claude CLI servers:', servers); + return servers; +} + +function parseClaudeGetOutput(output) { + // Parse the output from 'claude mcp get ' command + // This is a simple parser - might need adjustment based on actual output format + try { + // Try to extract JSON if present + const jsonMatch = output.match(/\{[\s\S]*\}/); + if (jsonMatch) { + return JSON.parse(jsonMatch[0]); + } + + // Otherwise, parse as text + const server = { raw_output: output }; + const lines = output.split('\n'); + + for (const line of lines) { + if (line.includes('Name:')) { + server.name = line.split(':')[1]?.trim(); + } else if (line.includes('Type:')) { + server.type = line.split(':')[1]?.trim(); + } else if (line.includes('Command:')) { + server.command = line.split(':')[1]?.trim(); + } else if (line.includes('URL:')) { + server.url = line.split(':')[1]?.trim(); + } + } + + return server; + } catch (error) { + return { raw_output: output, parse_error: error.message }; + } +} + +export default router; \ No newline at end of file diff --git a/server/src/modules/messages/messages.routes.js 
b/server/src/modules/messages/messages.routes.js new file mode 100644 index 00000000..1ebf9da7 --- /dev/null +++ b/server/src/modules/messages/messages.routes.js @@ -0,0 +1,61 @@ +/** + * Unified messages endpoint. + * + * GET /api/sessions/:sessionId/messages?provider=claude&projectName=foo&limit=50&offset=0 + * + * Replaces the four provider-specific session message endpoints with a single route + * that delegates to the appropriate adapter via the provider registry. + * + * @module routes/messages + */ + +import express from 'express'; +import { getProvider, getAllProviders } from '../../../providers/registry.js'; + +const router = express.Router(); + +/** + * GET /api/sessions/:sessionId/messages + * + * Auth: authenticateToken applied at mount level in index.js + * + * Query params: + * provider - 'claude' | 'cursor' | 'codex' | 'gemini' (default: 'claude') + * projectName - required for claude provider + * projectPath - required for cursor provider (absolute path used for cwdId hash) + * limit - page size (omit or null for all) + * offset - pagination offset (default: 0) + */ +router.get('/:sessionId/messages', async (req, res) => { + try { + const { sessionId } = req.params; + const provider = req.query.provider || 'claude'; + const projectName = req.query.projectName || ''; + const projectPath = req.query.projectPath || ''; + const limitParam = req.query.limit; + const limit = limitParam !== undefined && limitParam !== null && limitParam !== '' + ? parseInt(limitParam, 10) + : null; + const offset = parseInt(req.query.offset || '0', 10); + + const adapter = getProvider(provider); + if (!adapter) { + const available = getAllProviders().join(', '); + return res.status(400).json({ error: `Unknown provider: ${provider}. 
Available: ${available}` }); + } + + const result = await adapter.fetchHistory(sessionId, { + projectName, + projectPath, + limit, + offset, + }); + + return res.json(result); + } catch (error) { + console.error('Error fetching unified messages:', error); + return res.status(500).json({ error: 'Failed to fetch messages' }); + } +}); + +export default router; diff --git a/server/src/modules/notification-preferences/notification-preferences.routes.js b/server/src/modules/notification-preferences/notification-preferences.routes.js new file mode 100644 index 00000000..4d6d1652 --- /dev/null +++ b/server/src/modules/notification-preferences/notification-preferences.routes.js @@ -0,0 +1,30 @@ +import express from 'express'; +import { notificationPreferencesDb } from '../../../database/db.js'; + +const router = express.Router(); + +// =============================== +// Notification Preferences +// =============================== + +router.get('/', async (req, res) => { + try { + const preferences = notificationPreferencesDb.getPreferences(req.user.id); + res.json({ success: true, preferences }); + } catch (error) { + console.error('Error fetching notification preferences:', error); + res.status(500).json({ error: 'Failed to fetch notification preferences' }); + } +}); + +router.put('/', async (req, res) => { + try { + const preferences = notificationPreferencesDb.updatePreferences(req.user.id, req.body || {}); + res.json({ success: true, preferences }); + } catch (error) { + console.error('Error saving notification preferences:', error); + res.status(500).json({ error: 'Failed to save notification preferences' }); + } +}); + +export default router; diff --git a/server/src/modules/plugins/plugins.routes.js b/server/src/modules/plugins/plugins.routes.js new file mode 100644 index 00000000..a5c563c1 --- /dev/null +++ b/server/src/modules/plugins/plugins.routes.js @@ -0,0 +1,307 @@ +import express from 'express'; +import path from 'path'; +import http from 'http'; +import 
mime from 'mime-types'; +import fs from 'fs'; +import { + scanPlugins, + getPluginsConfig, + getPluginsDir, + savePluginsConfig, + getPluginDir, + resolvePluginAssetPath, + installPluginFromGit, + updatePluginFromGit, + uninstallPlugin, +} from '../../../utils/plugin-loader.js'; +import { + startPluginServer, + stopPluginServer, + getPluginPort, + isPluginRunning, +} from '../../../utils/plugin-process-manager.js'; + +const router = express.Router(); + +// GET / — List all installed plugins (includes server running status) +router.get('/', (req, res) => { + try { + const plugins = scanPlugins().map(p => ({ + ...p, + serverRunning: p.server ? isPluginRunning(p.name) : false, + })); + res.json({ plugins }); + } catch (err) { + res.status(500).json({ error: 'Failed to scan plugins', details: err.message }); + } +}); + +// GET /:name/manifest — Get single plugin manifest +router.get('/:name/manifest', (req, res) => { + try { + if (!/^[a-zA-Z0-9_-]+$/.test(req.params.name)) { + return res.status(400).json({ error: 'Invalid plugin name' }); + } + const plugins = scanPlugins(); + const plugin = plugins.find(p => p.name === req.params.name); + if (!plugin) { + return res.status(404).json({ error: 'Plugin not found' }); + } + res.json(plugin); + } catch (err) { + res.status(500).json({ error: 'Failed to read plugin manifest', details: err.message }); + } +}); + +// GET /:name/assets/* — Serve plugin static files +router.get('/:name/assets/*', (req, res) => { + const pluginName = req.params.name; + if (!/^[a-zA-Z0-9_-]+$/.test(pluginName)) { + return res.status(400).json({ error: 'Invalid plugin name' }); + } + const assetPath = req.params[0]; + + if (!assetPath) { + return res.status(400).json({ error: 'No asset path specified' }); + } + + const resolvedPath = resolvePluginAssetPath(pluginName, assetPath); + if (!resolvedPath) { + return res.status(404).json({ error: 'Asset not found' }); + } + + try { + const stat = fs.statSync(resolvedPath); + if (!stat.isFile()) { + 
return res.status(404).json({ error: 'Asset not found' }); + } + } catch { + return res.status(404).json({ error: 'Asset not found' }); + } + + const contentType = mime.lookup(resolvedPath) || 'application/octet-stream'; + res.setHeader('Content-Type', contentType); + // Prevent CDN/proxy caching of plugin assets so updates take effect immediately + res.setHeader('Cache-Control', 'no-store, no-cache, must-revalidate'); + res.setHeader('Pragma', 'no-cache'); + res.setHeader('Expires', '0'); + const stream = fs.createReadStream(resolvedPath); + stream.on('error', () => { + if (!res.headersSent) { + res.status(500).json({ error: 'Failed to read asset' }); + } else { + res.end(); + } + }); + stream.pipe(res); +}); + +// PUT /:name/enable — Toggle plugin enabled/disabled (starts/stops server if applicable) +router.put('/:name/enable', async (req, res) => { + try { + const { enabled } = req.body; + if (typeof enabled !== 'boolean') { + return res.status(400).json({ error: '"enabled" must be a boolean' }); + } + + const plugins = scanPlugins(); + const plugin = plugins.find(p => p.name === req.params.name); + if (!plugin) { + return res.status(404).json({ error: 'Plugin not found' }); + } + + const config = getPluginsConfig(); + config[req.params.name] = { ...config[req.params.name], enabled }; + savePluginsConfig(config); + + // Start or stop the plugin server as needed + if (plugin.server) { + if (enabled && !isPluginRunning(plugin.name)) { + const pluginDir = getPluginDir(plugin.name); + if (pluginDir) { + try { + await startPluginServer(plugin.name, pluginDir, plugin.server); + } catch (err) { + console.error(`[Plugins] Failed to start server for "${plugin.name}":`, err.message); + } + } + } else if (!enabled && isPluginRunning(plugin.name)) { + await stopPluginServer(plugin.name); + } + } + + res.json({ success: true, name: req.params.name, enabled }); + } catch (err) { + res.status(500).json({ error: 'Failed to update plugin', details: err.message }); + } +}); + +// 
POST /install — Install plugin from git URL +router.post('/install', async (req, res) => { + try { + const { url } = req.body; + if (!url || typeof url !== 'string') { + return res.status(400).json({ error: '"url" is required and must be a string' }); + } + + // Basic URL validation + if (!url.startsWith('https://') && !url.startsWith('git@')) { + return res.status(400).json({ error: 'URL must start with https:// or git@' }); + } + + const manifest = await installPluginFromGit(url); + + // Auto-start the server if the plugin has one (enabled by default) + if (manifest.server) { + const pluginDir = getPluginDir(manifest.name); + if (pluginDir) { + try { + await startPluginServer(manifest.name, pluginDir, manifest.server); + } catch (err) { + console.error(`[Plugins] Failed to start server for "${manifest.name}":`, err.message); + } + } + } + + res.json({ success: true, plugin: manifest }); + } catch (err) { + res.status(400).json({ error: 'Failed to install plugin', details: err.message }); + } +}); + +// POST /:name/update — Pull latest from git (restarts server if running) +router.post('/:name/update', async (req, res) => { + try { + const pluginName = req.params.name; + + if (!/^[a-zA-Z0-9_-]+$/.test(pluginName)) { + return res.status(400).json({ error: 'Invalid plugin name' }); + } + + const wasRunning = isPluginRunning(pluginName); + if (wasRunning) { + await stopPluginServer(pluginName); + } + + const manifest = await updatePluginFromGit(pluginName); + + // Restart server if it was running before the update + if (wasRunning && manifest.server) { + const pluginDir = getPluginDir(pluginName); + if (pluginDir) { + try { + await startPluginServer(pluginName, pluginDir, manifest.server); + } catch (err) { + console.error(`[Plugins] Failed to restart server for "${pluginName}":`, err.message); + } + } + } + + res.json({ success: true, plugin: manifest }); + } catch (err) { + res.status(400).json({ error: 'Failed to update plugin', details: err.message }); + } +}); + 
+// ALL /:name/rpc/* — Proxy requests to plugin's server subprocess +router.all('/:name/rpc/*', async (req, res) => { + const pluginName = req.params.name; + const rpcPath = req.params[0] || ''; + + if (!/^[a-zA-Z0-9_-]+$/.test(pluginName)) { + return res.status(400).json({ error: 'Invalid plugin name' }); + } + + let port = getPluginPort(pluginName); + if (!port) { + // Lazily start the plugin server if it exists and is enabled + const plugins = scanPlugins(); + const plugin = plugins.find(p => p.name === pluginName); + if (!plugin || !plugin.server) { + return res.status(503).json({ error: 'Plugin server is not running' }); + } + if (!plugin.enabled) { + return res.status(503).json({ error: 'Plugin is disabled' }); + } + const pluginDir = path.join(getPluginsDir(), plugin.dirName); + try { + port = await startPluginServer(pluginName, pluginDir, plugin.server); + } catch (err) { + return res.status(503).json({ error: 'Plugin server failed to start', details: err.message }); + } + } + + // Inject configured secrets as headers + const config = getPluginsConfig(); + const pluginConfig = config[pluginName] || {}; + const secrets = pluginConfig.secrets || {}; + + const headers = { + 'content-type': req.headers['content-type'] || 'application/json', + }; + + // Add per-plugin user-configured secrets as X-Plugin-Secret-* headers + for (const [key, value] of Object.entries(secrets)) { + headers[`x-plugin-secret-${key.toLowerCase()}`] = String(value); + } + + // Reconstruct query string + const qs = req.url.includes('?') ? '?' 
+ req.url.split('?').slice(1).join('?') : ''; + + const options = { + hostname: '127.0.0.1', + port, + path: `/${rpcPath}${qs}`, + method: req.method, + headers, + }; + + const proxyReq = http.request(options, (proxyRes) => { + res.writeHead(proxyRes.statusCode, proxyRes.headers); + proxyRes.pipe(res); + }); + + proxyReq.on('error', (err) => { + if (!res.headersSent) { + res.status(502).json({ error: 'Plugin server error', details: err.message }); + } else { + res.end(); + } + }); + + // Forward body (already parsed by express JSON middleware, so re-stringify). + // Check content-length to detect whether a body was actually sent, since + // req.body can be falsy for valid payloads like 0, false, null, or {}. + const hasBody = req.headers['content-length'] && parseInt(req.headers['content-length'], 10) > 0; + if (hasBody && req.body !== undefined) { + const bodyStr = JSON.stringify(req.body); + proxyReq.setHeader('content-length', Buffer.byteLength(bodyStr)); + proxyReq.write(bodyStr); + } + + proxyReq.end(); +}); + +// DELETE /:name — Uninstall plugin (stops server first) +router.delete('/:name', async (req, res) => { + try { + const pluginName = req.params.name; + + // Validate name format to prevent path traversal + if (!/^[a-zA-Z0-9_-]+$/.test(pluginName)) { + return res.status(400).json({ error: 'Invalid plugin name' }); + } + + // Stop server and wait for the process to fully exit before deleting files + if (isPluginRunning(pluginName)) { + await stopPluginServer(pluginName); + } + + await uninstallPlugin(pluginName); + res.json({ success: true, name: pluginName }); + } catch (err) { + res.status(400).json({ error: 'Failed to uninstall plugin', details: err.message }); + } +}); + +export default router; diff --git a/server/src/modules/projects/projects.routes.js b/server/src/modules/projects/projects.routes.js new file mode 100644 index 00000000..ff9b6983 --- /dev/null +++ b/server/src/modules/projects/projects.routes.js @@ -0,0 +1,548 @@ +import express from 
'express'; +import { promises as fs } from 'fs'; +import path from 'path'; +import { spawn } from 'child_process'; +import os from 'os'; +import { addProjectManually } from '../../../projects.js'; + +const router = express.Router(); + +function sanitizeGitError(message, token) { + if (!message || !token) return message; + return message.replace(new RegExp(token.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'), 'g'), '***'); +} + +// Configure allowed workspace root (defaults to user's home directory) +export const WORKSPACES_ROOT = process.env.WORKSPACES_ROOT || os.homedir(); + +// System-critical paths that should never be used as workspace directories +export const FORBIDDEN_PATHS = [ + // Unix + '/', + '/etc', + '/bin', + '/sbin', + '/usr', + '/dev', + '/proc', + '/sys', + '/var', + '/boot', + '/root', + '/lib', + '/lib64', + '/opt', + '/tmp', + '/run', + // Windows + 'C:\\Windows', + 'C:\\Program Files', + 'C:\\Program Files (x86)', + 'C:\\ProgramData', + 'C:\\System Volume Information', + 'C:\\$Recycle.Bin' +]; + +/** + * Validates that a path is safe for workspace operations + * @param {string} requestedPath - The path to validate + * @returns {Promise<{valid: boolean, resolvedPath?: string, error?: string}>} + */ +export async function validateWorkspacePath(requestedPath) { + try { + // Resolve to absolute path + let absolutePath = path.resolve(requestedPath); + + // Check if path is a forbidden system directory + const normalizedPath = path.normalize(absolutePath); + if (FORBIDDEN_PATHS.includes(normalizedPath) || normalizedPath === '/') { + return { + valid: false, + error: 'Cannot use system-critical directories as workspace locations' + }; + } + + // Additional check for paths starting with forbidden directories + for (const forbidden of FORBIDDEN_PATHS) { + if (normalizedPath === forbidden || + normalizedPath.startsWith(forbidden + path.sep)) { + // Exception: /var/tmp and similar user-accessible paths might be allowed + // but /var itself and most /var 
subdirectories should be blocked + if (forbidden === '/var' && + (normalizedPath.startsWith('/var/tmp') || + normalizedPath.startsWith('/var/folders'))) { + continue; // Allow these specific cases + } + + return { + valid: false, + error: `Cannot create workspace in system directory: ${forbidden}` + }; + } + } + + // Try to resolve the real path (following symlinks) + let realPath; + try { + // Check if path exists to resolve real path + await fs.access(absolutePath); + realPath = await fs.realpath(absolutePath); + } catch (error) { + if (error.code === 'ENOENT') { + // Path doesn't exist yet - check parent directory + let parentPath = path.dirname(absolutePath); + try { + const parentRealPath = await fs.realpath(parentPath); + + // Reconstruct the full path with real parent + realPath = path.join(parentRealPath, path.basename(absolutePath)); + } catch (parentError) { + if (parentError.code === 'ENOENT') { + // Parent doesn't exist either - use the absolute path as-is + // We'll validate it's within allowed root + realPath = absolutePath; + } else { + throw parentError; + } + } + } else { + throw error; + } + } + + // Resolve the workspace root to its real path + const resolvedWorkspaceRoot = await fs.realpath(WORKSPACES_ROOT); + + // Ensure the resolved path is contained within the allowed workspace root + if (!realPath.startsWith(resolvedWorkspaceRoot + path.sep) && + realPath !== resolvedWorkspaceRoot) { + return { + valid: false, + error: `Workspace path must be within the allowed workspace root: ${WORKSPACES_ROOT}` + }; + } + + // Additional symlink check for existing paths + try { + await fs.access(absolutePath); + const stats = await fs.lstat(absolutePath); + + if (stats.isSymbolicLink()) { + // Verify symlink target is also within allowed root + const linkTarget = await fs.readlink(absolutePath); + const resolvedTarget = path.resolve(path.dirname(absolutePath), linkTarget); + const realTarget = await fs.realpath(resolvedTarget); + + if 
(!realTarget.startsWith(resolvedWorkspaceRoot + path.sep) && + realTarget !== resolvedWorkspaceRoot) { + return { + valid: false, + error: 'Symlink target is outside the allowed workspace root' + }; + } + } + } catch (error) { + if (error.code !== 'ENOENT') { + throw error; + } + // Path doesn't exist - that's fine for new workspace creation + } + + return { + valid: true, + resolvedPath: realPath + }; + + } catch (error) { + return { + valid: false, + error: `Path validation failed: ${error.message}` + }; + } +} + +/** + * Create a new workspace + * POST /api/projects/create-workspace + * + * Body: + * - workspaceType: 'existing' | 'new' + * - path: string (workspace path) + * - githubUrl?: string (optional, for new workspaces) + * - githubTokenId?: number (optional, ID of stored token) + * - newGithubToken?: string (optional, one-time token) + */ +router.post('/create-workspace', async (req, res) => { + try { + const { workspaceType, path: workspacePath, githubUrl, githubTokenId, newGithubToken } = req.body; + + // Validate required fields + if (!workspaceType || !workspacePath) { + return res.status(400).json({ error: 'workspaceType and path are required' }); + } + + if (!['existing', 'new'].includes(workspaceType)) { + return res.status(400).json({ error: 'workspaceType must be "existing" or "new"' }); + } + + // Validate path safety before any operations + const validation = await validateWorkspacePath(workspacePath); + if (!validation.valid) { + return res.status(400).json({ + error: 'Invalid workspace path', + details: validation.error + }); + } + + const absolutePath = validation.resolvedPath; + + // Handle existing workspace + if (workspaceType === 'existing') { + // Check if the path exists + try { + await fs.access(absolutePath); + const stats = await fs.stat(absolutePath); + + if (!stats.isDirectory()) { + return res.status(400).json({ error: 'Path exists but is not a directory' }); + } + } catch (error) { + if (error.code === 'ENOENT') { + return 
res.status(404).json({ error: 'Workspace path does not exist' }); + } + throw error; + } + + // Add the existing workspace to the project list + const project = await addProjectManually(absolutePath); + + return res.json({ + success: true, + project, + message: 'Existing workspace added successfully' + }); + } + + // Handle new workspace creation + if (workspaceType === 'new') { + // Create the directory if it doesn't exist + await fs.mkdir(absolutePath, { recursive: true }); + + // If GitHub URL is provided, clone the repository + if (githubUrl) { + let githubToken = null; + + // Get GitHub token if needed + if (githubTokenId) { + // Fetch token from database + const token = await getGithubTokenById(githubTokenId, req.user.id); + if (!token) { + // Clean up created directory + await fs.rm(absolutePath, { recursive: true, force: true }); + return res.status(404).json({ error: 'GitHub token not found' }); + } + githubToken = token.github_token; + } else if (newGithubToken) { + githubToken = newGithubToken; + } + + // Extract repo name from URL for the clone destination + const normalizedUrl = githubUrl.replace(/\/+$/, '').replace(/\.git$/, ''); + const repoName = normalizedUrl.split('/').pop() || 'repository'; + const clonePath = path.join(absolutePath, repoName); + + // Check if clone destination already exists to prevent data loss + try { + await fs.access(clonePath); + return res.status(409).json({ + error: 'Directory already exists', + details: `The destination path "${clonePath}" already exists. 
Please choose a different location or remove the existing directory.` + }); + } catch (err) { + // Directory doesn't exist, which is what we want + } + + // Clone the repository into a subfolder + try { + await cloneGitHubRepository(githubUrl, clonePath, githubToken); + } catch (error) { + // Only clean up if clone created partial data (check if dir exists and is empty or partial) + try { + const stats = await fs.stat(clonePath); + if (stats.isDirectory()) { + await fs.rm(clonePath, { recursive: true, force: true }); + } + } catch (cleanupError) { + // Directory doesn't exist or cleanup failed - ignore + } + throw new Error(`Failed to clone repository: ${error.message}`); + } + + // Add the cloned repo path to the project list + const project = await addProjectManually(clonePath); + + return res.json({ + success: true, + project, + message: 'New workspace created and repository cloned successfully' + }); + } + + // Add the new workspace to the project list (no clone) + const project = await addProjectManually(absolutePath); + + return res.json({ + success: true, + project, + message: 'New workspace created successfully' + }); + } + + } catch (error) { + console.error('Error creating workspace:', error); + res.status(500).json({ + error: error.message || 'Failed to create workspace', + details: process.env.NODE_ENV === 'development' ? error.stack : undefined + }); + } +}); + +/** + * Helper function to get GitHub token from database + */ +async function getGithubTokenById(tokenId, userId) { + const { db } = await import('../../../database/db.js'); + + const credential = db.prepare( + 'SELECT * FROM user_credentials WHERE id = ? AND user_id = ? AND credential_type = ? 
AND is_active = 1' + ).get(tokenId, userId, 'github_token'); + + // Return in the expected format (github_token field for compatibility) + if (credential) { + return { + ...credential, + github_token: credential.credential_value + }; + } + + return null; +} + +/** + * Clone repository with progress streaming (SSE) + * GET /api/projects/clone-progress + */ +router.get('/clone-progress', async (req, res) => { + const { path: workspacePath, githubUrl, githubTokenId, newGithubToken } = req.query; + + res.setHeader('Content-Type', 'text/event-stream'); + res.setHeader('Cache-Control', 'no-cache'); + res.setHeader('Connection', 'keep-alive'); + res.flushHeaders(); + + const sendEvent = (type, data) => { + res.write(`data: ${JSON.stringify({ type, ...data })}\n\n`); + }; + + try { + if (!workspacePath || !githubUrl) { + sendEvent('error', { message: 'workspacePath and githubUrl are required' }); + res.end(); + return; + } + + const validation = await validateWorkspacePath(workspacePath); + if (!validation.valid) { + sendEvent('error', { message: validation.error }); + res.end(); + return; + } + + const absolutePath = validation.resolvedPath; + + await fs.mkdir(absolutePath, { recursive: true }); + + let githubToken = null; + if (githubTokenId) { + const token = await getGithubTokenById(parseInt(githubTokenId), req.user.id); + if (!token) { + await fs.rm(absolutePath, { recursive: true, force: true }); + sendEvent('error', { message: 'GitHub token not found' }); + res.end(); + return; + } + githubToken = token.github_token; + } else if (newGithubToken) { + githubToken = newGithubToken; + } + + const normalizedUrl = githubUrl.replace(/\/+$/, '').replace(/\.git$/, ''); + const repoName = normalizedUrl.split('/').pop() || 'repository'; + const clonePath = path.join(absolutePath, repoName); + + // Check if clone destination already exists to prevent data loss + try { + await fs.access(clonePath); + sendEvent('error', { message: `Directory "${repoName}" already exists. 
Please choose a different location or remove the existing directory.` }); + res.end(); + return; + } catch (err) { + // Directory doesn't exist, which is what we want + } + + let cloneUrl = githubUrl; + if (githubToken) { + try { + const url = new URL(githubUrl); + url.username = githubToken; + url.password = ''; + cloneUrl = url.toString(); + } catch (error) { + // SSH URL or invalid - use as-is + } + } + + sendEvent('progress', { message: `Cloning into '${repoName}'...` }); + + const gitProcess = spawn('git', ['clone', '--progress', cloneUrl, clonePath], { + stdio: ['ignore', 'pipe', 'pipe'], + env: { + ...process.env, + GIT_TERMINAL_PROMPT: '0' + } + }); + + let lastError = ''; + + gitProcess.stdout.on('data', (data) => { + const message = data.toString().trim(); + if (message) { + sendEvent('progress', { message }); + } + }); + + gitProcess.stderr.on('data', (data) => { + const message = data.toString().trim(); + lastError = message; + if (message) { + sendEvent('progress', { message }); + } + }); + + gitProcess.on('close', async (code) => { + if (code === 0) { + try { + const project = await addProjectManually(clonePath); + sendEvent('complete', { project, message: 'Repository cloned successfully' }); + } catch (error) { + sendEvent('error', { message: `Clone succeeded but failed to add project: ${error.message}` }); + } + } else { + const sanitizedError = sanitizeGitError(lastError, githubToken); + let errorMessage = 'Git clone failed'; + if (lastError.includes('Authentication failed') || lastError.includes('could not read Username')) { + errorMessage = 'Authentication failed. Please check your credentials.'; + } else if (lastError.includes('Repository not found')) { + errorMessage = 'Repository not found. 
Please check the URL and ensure you have access.'; + } else if (lastError.includes('already exists')) { + errorMessage = 'Directory already exists'; + } else if (sanitizedError) { + errorMessage = sanitizedError; + } + try { + await fs.rm(clonePath, { recursive: true, force: true }); + } catch (cleanupError) { + console.error('Failed to clean up after clone failure:', sanitizeGitError(cleanupError.message, githubToken)); + } + sendEvent('error', { message: errorMessage }); + } + res.end(); + }); + + gitProcess.on('error', (error) => { + if (error.code === 'ENOENT') { + sendEvent('error', { message: 'Git is not installed or not in PATH' }); + } else { + sendEvent('error', { message: error.message }); + } + res.end(); + }); + + req.on('close', () => { + gitProcess.kill(); + }); + + } catch (error) { + sendEvent('error', { message: error.message }); + res.end(); + } +}); + +/** + * Helper function to clone a GitHub repository + */ +function cloneGitHubRepository(githubUrl, destinationPath, githubToken = null) { + return new Promise((resolve, reject) => { + let cloneUrl = githubUrl; + + if (githubToken) { + try { + const url = new URL(githubUrl); + url.username = githubToken; + url.password = ''; + cloneUrl = url.toString(); + } catch (error) { + // SSH URL - use as-is + } + } + + const gitProcess = spawn('git', ['clone', '--progress', cloneUrl, destinationPath], { + stdio: ['ignore', 'pipe', 'pipe'], + env: { + ...process.env, + GIT_TERMINAL_PROMPT: '0' + } + }); + + let stdout = ''; + let stderr = ''; + + gitProcess.stdout.on('data', (data) => { + stdout += data.toString(); + }); + + gitProcess.stderr.on('data', (data) => { + stderr += data.toString(); + }); + + gitProcess.on('close', (code) => { + if (code === 0) { + resolve({ stdout, stderr }); + } else { + let errorMessage = 'Git clone failed'; + + if (stderr.includes('Authentication failed') || stderr.includes('could not read Username')) { + errorMessage = 'Authentication failed. 
Please check your GitHub token.'; + } else if (stderr.includes('Repository not found')) { + errorMessage = 'Repository not found. Please check the URL and ensure you have access.'; + } else if (stderr.includes('already exists')) { + errorMessage = 'Directory already exists'; + } else if (stderr) { + errorMessage = stderr; + } + + reject(new Error(errorMessage)); + } + }); + + gitProcess.on('error', (error) => { + if (error.code === 'ENOENT') { + reject(new Error('Git is not installed or not in PATH')); + } else { + reject(error); + } + }); + }); +} + +export default router; diff --git a/server/src/modules/push-sub/push-sub.routes.js b/server/src/modules/push-sub/push-sub.routes.js new file mode 100644 index 00000000..0bf4d881 --- /dev/null +++ b/server/src/modules/push-sub/push-sub.routes.js @@ -0,0 +1,80 @@ +import express from 'express'; +import { notificationPreferencesDb, pushSubscriptionsDb } from '../../../database/db.js'; +import { getPublicKey } from '../../../services/vapid-keys.js'; +import { createNotificationEvent, notifyUserIfEnabled } from '../../../services/notification-orchestrator.js'; + +const router = express.Router(); + +// =============================== +// Push Subscription Management +// =============================== + +router.get('/vapid-public-key', async (req, res) => { + try { + const publicKey = getPublicKey(); + res.json({ publicKey }); + } catch (error) { + console.error('Error fetching VAPID public key:', error); + res.status(500).json({ error: 'Failed to fetch VAPID public key' }); + } +}); + +router.post('/subscribe', async (req, res) => { + try { + const { endpoint, keys } = req.body; + if (!endpoint || !keys?.p256dh || !keys?.auth) { + return res.status(400).json({ error: 'Missing subscription fields' }); + } + pushSubscriptionsDb.saveSubscription(req.user.id, endpoint, keys.p256dh, keys.auth); + + // Enable webPush in preferences so the confirmation goes through the full pipeline + const currentPrefs = 
notificationPreferencesDb.getPreferences(req.user.id); + if (!currentPrefs?.channels?.webPush) { + notificationPreferencesDb.updatePreferences(req.user.id, { + ...currentPrefs, + channels: { ...currentPrefs?.channels, webPush: true }, + }); + } + + res.json({ success: true }); + + // Send a confirmation push through the full notification pipeline + const event = createNotificationEvent({ + provider: 'system', + kind: 'info', + code: 'push.enabled', + meta: { message: 'Push notifications are now enabled!' }, + severity: 'info' + }); + notifyUserIfEnabled({ userId: req.user.id, event }); + } catch (error) { + console.error('Error saving push subscription:', error); + res.status(500).json({ error: 'Failed to save push subscription' }); + } +}); + +router.post('/unsubscribe', async (req, res) => { + try { + const { endpoint } = req.body; + if (!endpoint) { + return res.status(400).json({ error: 'Missing endpoint' }); + } + pushSubscriptionsDb.removeSubscription(endpoint); + + // Disable webPush in preferences to match subscription state + const currentPrefs = notificationPreferencesDb.getPreferences(req.user.id); + if (currentPrefs?.channels?.webPush) { + notificationPreferencesDb.updatePreferences(req.user.id, { + ...currentPrefs, + channels: { ...currentPrefs.channels, webPush: false }, + }); + } + + res.json({ success: true }); + } catch (error) { + console.error('Error removing push subscription:', error); + res.status(500).json({ error: 'Failed to remove push subscription' }); + } +}); + +export default router; diff --git a/server/src/modules/settings/settings.routes.js b/server/src/modules/settings/settings.routes.js new file mode 100644 index 00000000..685d2460 --- /dev/null +++ b/server/src/modules/settings/settings.routes.js @@ -0,0 +1,276 @@ +import express from 'express'; +import { apiKeysDb, credentialsDb, notificationPreferencesDb, pushSubscriptionsDb } from '../../../database/db.js'; +import { getPublicKey } from '../../../services/vapid-keys.js'; +import 
{ createNotificationEvent, notifyUserIfEnabled } from '../../../services/notification-orchestrator.js'; + +const router = express.Router(); + +// =============================== +// API Keys Management +// =============================== + +// Get all API keys for the authenticated user +router.get('/api-keys', async (req, res) => { + try { + const apiKeys = apiKeysDb.getApiKeys(req.user.id); + // Don't send the full API key in the list for security + const sanitizedKeys = apiKeys.map(key => ({ + ...key, + api_key: key.api_key.substring(0, 10) + '...' + })); + res.json({ apiKeys: sanitizedKeys }); + } catch (error) { + console.error('Error fetching API keys:', error); + res.status(500).json({ error: 'Failed to fetch API keys' }); + } +}); + +// Create a new API key +router.post('/api-keys', async (req, res) => { + try { + const { keyName } = req.body; + + if (!keyName || !keyName.trim()) { + return res.status(400).json({ error: 'Key name is required' }); + } + + const result = apiKeysDb.createApiKey(req.user.id, keyName.trim()); + res.json({ + success: true, + apiKey: result + }); + } catch (error) { + console.error('Error creating API key:', error); + res.status(500).json({ error: 'Failed to create API key' }); + } +}); + +// Delete an API key +router.delete('/api-keys/:keyId', async (req, res) => { + try { + const { keyId } = req.params; + const success = apiKeysDb.deleteApiKey(req.user.id, parseInt(keyId)); + + if (success) { + res.json({ success: true }); + } else { + res.status(404).json({ error: 'API key not found' }); + } + } catch (error) { + console.error('Error deleting API key:', error); + res.status(500).json({ error: 'Failed to delete API key' }); + } +}); + +// Toggle API key active status +router.patch('/api-keys/:keyId/toggle', async (req, res) => { + try { + const { keyId } = req.params; + const { isActive } = req.body; + + if (typeof isActive !== 'boolean') { + return res.status(400).json({ error: 'isActive must be a boolean' }); + } + + const 
success = apiKeysDb.toggleApiKey(req.user.id, parseInt(keyId), isActive); + + if (success) { + res.json({ success: true }); + } else { + res.status(404).json({ error: 'API key not found' }); + } + } catch (error) { + console.error('Error toggling API key:', error); + res.status(500).json({ error: 'Failed to toggle API key' }); + } +}); + +// =============================== +// Generic Credentials Management +// =============================== + +// Get all credentials for the authenticated user (optionally filtered by type) +router.get('/credentials', async (req, res) => { + try { + const { type } = req.query; + const credentials = credentialsDb.getCredentials(req.user.id, type || null); + // Don't send the actual credential values for security + res.json({ credentials }); + } catch (error) { + console.error('Error fetching credentials:', error); + res.status(500).json({ error: 'Failed to fetch credentials' }); + } +}); + +// Create a new credential +router.post('/credentials', async (req, res) => { + try { + const { credentialName, credentialType, credentialValue, description } = req.body; + + if (!credentialName || !credentialName.trim()) { + return res.status(400).json({ error: 'Credential name is required' }); + } + + if (!credentialType || !credentialType.trim()) { + return res.status(400).json({ error: 'Credential type is required' }); + } + + if (!credentialValue || !credentialValue.trim()) { + return res.status(400).json({ error: 'Credential value is required' }); + } + + const result = credentialsDb.createCredential( + req.user.id, + credentialName.trim(), + credentialType.trim(), + credentialValue.trim(), + description?.trim() || null + ); + + res.json({ + success: true, + credential: result + }); + } catch (error) { + console.error('Error creating credential:', error); + res.status(500).json({ error: 'Failed to create credential' }); + } +}); + +// Delete a credential +router.delete('/credentials/:credentialId', async (req, res) => { + try { + const { 
credentialId } = req.params; + const success = credentialsDb.deleteCredential(req.user.id, parseInt(credentialId)); + + if (success) { + res.json({ success: true }); + } else { + res.status(404).json({ error: 'Credential not found' }); + } + } catch (error) { + console.error('Error deleting credential:', error); + res.status(500).json({ error: 'Failed to delete credential' }); + } +}); + +// Toggle credential active status +router.patch('/credentials/:credentialId/toggle', async (req, res) => { + try { + const { credentialId } = req.params; + const { isActive } = req.body; + + if (typeof isActive !== 'boolean') { + return res.status(400).json({ error: 'isActive must be a boolean' }); + } + + const success = credentialsDb.toggleCredential(req.user.id, parseInt(credentialId), isActive); + + if (success) { + res.json({ success: true }); + } else { + res.status(404).json({ error: 'Credential not found' }); + } + } catch (error) { + console.error('Error toggling credential:', error); + res.status(500).json({ error: 'Failed to toggle credential' }); + } +}); + +// =============================== +// Notification Preferences +// =============================== + +router.get('/notification-preferences', async (req, res) => { + try { + const preferences = notificationPreferencesDb.getPreferences(req.user.id); + res.json({ success: true, preferences }); + } catch (error) { + console.error('Error fetching notification preferences:', error); + res.status(500).json({ error: 'Failed to fetch notification preferences' }); + } +}); + +router.put('/notification-preferences', async (req, res) => { + try { + const preferences = notificationPreferencesDb.updatePreferences(req.user.id, req.body || {}); + res.json({ success: true, preferences }); + } catch (error) { + console.error('Error saving notification preferences:', error); + res.status(500).json({ error: 'Failed to save notification preferences' }); + } +}); + +// =============================== +// Push Subscription Management 
+// =============================== + +router.get('/push/vapid-public-key', async (req, res) => { + try { + const publicKey = getPublicKey(); + res.json({ publicKey }); + } catch (error) { + console.error('Error fetching VAPID public key:', error); + res.status(500).json({ error: 'Failed to fetch VAPID public key' }); + } +}); + +router.post('/push/subscribe', async (req, res) => { + try { + const { endpoint, keys } = req.body; + if (!endpoint || !keys?.p256dh || !keys?.auth) { + return res.status(400).json({ error: 'Missing subscription fields' }); + } + pushSubscriptionsDb.saveSubscription(req.user.id, endpoint, keys.p256dh, keys.auth); + + // Enable webPush in preferences so the confirmation goes through the full pipeline + const currentPrefs = notificationPreferencesDb.getPreferences(req.user.id); + if (!currentPrefs?.channels?.webPush) { + notificationPreferencesDb.updatePreferences(req.user.id, { + ...currentPrefs, + channels: { ...currentPrefs?.channels, webPush: true }, + }); + } + + res.json({ success: true }); + + // Send a confirmation push through the full notification pipeline + const event = createNotificationEvent({ + provider: 'system', + kind: 'info', + code: 'push.enabled', + meta: { message: 'Push notifications are now enabled!' 
}, + severity: 'info' + }); + notifyUserIfEnabled({ userId: req.user.id, event }); + } catch (error) { + console.error('Error saving push subscription:', error); + res.status(500).json({ error: 'Failed to save push subscription' }); + } +}); + +router.post('/push/unsubscribe', async (req, res) => { + try { + const { endpoint } = req.body; + if (!endpoint) { + return res.status(400).json({ error: 'Missing endpoint' }); + } + pushSubscriptionsDb.removeSubscription(endpoint); + + // Disable webPush in preferences to match subscription state + const currentPrefs = notificationPreferencesDb.getPreferences(req.user.id); + if (currentPrefs?.channels?.webPush) { + notificationPreferencesDb.updatePreferences(req.user.id, { + ...currentPrefs, + channels: { ...currentPrefs.channels, webPush: false }, + }); + } + + res.json({ success: true }); + } catch (error) { + console.error('Error removing push subscription:', error); + res.status(500).json({ error: 'Failed to remove push subscription' }); + } +}); + +export default router; diff --git a/server/src/modules/taskmaster/taskmaster.routes.js b/server/src/modules/taskmaster/taskmaster.routes.js new file mode 100644 index 00000000..f02c0bb8 --- /dev/null +++ b/server/src/modules/taskmaster/taskmaster.routes.js @@ -0,0 +1,1963 @@ +/** + * TASKMASTER API ROUTES + * ==================== + * + * This module provides API endpoints for TaskMaster integration including: + * - .taskmaster folder detection in project directories + * - MCP server configuration detection + * - TaskMaster state and metadata management + */ + +import express from 'express'; +import fs from 'fs'; +import path from 'path'; +import { promises as fsPromises } from 'fs'; +import { spawn } from 'child_process'; +import { fileURLToPath } from 'url'; +import { dirname } from 'path'; +import os from 'os'; +import { extractProjectDirectory } from '../../../projects.js'; +import { detectTaskMasterMCPServer } from '../../../utils/mcp-detector.js'; +import { 
broadcastTaskMasterProjectUpdate, broadcastTaskMasterTasksUpdate } from '../../../utils/taskmaster-websocket.js'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = dirname(__filename); + +const router = express.Router(); + +/** + * Check if TaskMaster CLI is installed globally + * @returns {Promise} Installation status result + */ +async function checkTaskMasterInstallation() { + return new Promise((resolve) => { + // Check if task-master command is available + const child = spawn('which', ['task-master'], { + stdio: ['ignore', 'pipe', 'pipe'], + shell: true + }); + + let output = ''; + let errorOutput = ''; + + child.stdout.on('data', (data) => { + output += data.toString(); + }); + + child.stderr.on('data', (data) => { + errorOutput += data.toString(); + }); + + child.on('close', (code) => { + if (code === 0 && output.trim()) { + // TaskMaster is installed, get version + const versionChild = spawn('task-master', ['--version'], { + stdio: ['ignore', 'pipe', 'pipe'], + shell: true + }); + + let versionOutput = ''; + + versionChild.stdout.on('data', (data) => { + versionOutput += data.toString(); + }); + + versionChild.on('close', (versionCode) => { + resolve({ + isInstalled: true, + installPath: output.trim(), + version: versionCode === 0 ? 
versionOutput.trim() : 'unknown', + reason: null + }); + }); + + versionChild.on('error', () => { + resolve({ + isInstalled: true, + installPath: output.trim(), + version: 'unknown', + reason: null + }); + }); + } else { + resolve({ + isInstalled: false, + installPath: null, + version: null, + reason: 'TaskMaster CLI not found in PATH' + }); + } + }); + + child.on('error', (error) => { + resolve({ + isInstalled: false, + installPath: null, + version: null, + reason: `Error checking installation: ${error.message}` + }); + }); + }); +} + +/** + * Detect .taskmaster folder presence in a given project directory + * @param {string} projectPath - Absolute path to project directory + * @returns {Promise} Detection result with status and metadata + */ +async function detectTaskMasterFolder(projectPath) { + try { + const taskMasterPath = path.join(projectPath, '.taskmaster'); + + // Check if .taskmaster directory exists + try { + const stats = await fsPromises.stat(taskMasterPath); + if (!stats.isDirectory()) { + return { + hasTaskmaster: false, + reason: '.taskmaster exists but is not a directory' + }; + } + } catch (error) { + if (error.code === 'ENOENT') { + return { + hasTaskmaster: false, + reason: '.taskmaster directory not found' + }; + } + throw error; + } + + // Check for key TaskMaster files + const keyFiles = [ + 'tasks/tasks.json', + 'config.json' + ]; + + const fileStatus = {}; + let hasEssentialFiles = true; + + for (const file of keyFiles) { + const filePath = path.join(taskMasterPath, file); + try { + await fsPromises.access(filePath, fs.constants.R_OK); + fileStatus[file] = true; + } catch (error) { + fileStatus[file] = false; + if (file === 'tasks/tasks.json') { + hasEssentialFiles = false; + } + } + } + + // Parse tasks.json if it exists for metadata + let taskMetadata = null; + if (fileStatus['tasks/tasks.json']) { + try { + const tasksPath = path.join(taskMasterPath, 'tasks/tasks.json'); + const tasksContent = await fsPromises.readFile(tasksPath, 
'utf8'); + const tasksData = JSON.parse(tasksContent); + + // Handle both tagged and legacy formats + let tasks = []; + if (tasksData.tasks) { + // Legacy format + tasks = tasksData.tasks; + } else { + // Tagged format - get tasks from all tags + Object.values(tasksData).forEach(tagData => { + if (tagData.tasks) { + tasks = tasks.concat(tagData.tasks); + } + }); + } + + // Calculate task statistics + const stats = tasks.reduce((acc, task) => { + acc.total++; + acc[task.status] = (acc[task.status] || 0) + 1; + + // Count subtasks + if (task.subtasks) { + task.subtasks.forEach(subtask => { + acc.subtotalTasks++; + acc.subtasks = acc.subtasks || {}; + acc.subtasks[subtask.status] = (acc.subtasks[subtask.status] || 0) + 1; + }); + } + + return acc; + }, { + total: 0, + subtotalTasks: 0, + pending: 0, + 'in-progress': 0, + done: 0, + review: 0, + deferred: 0, + cancelled: 0, + subtasks: {} + }); + + taskMetadata = { + taskCount: stats.total, + subtaskCount: stats.subtotalTasks, + completed: stats.done || 0, + pending: stats.pending || 0, + inProgress: stats['in-progress'] || 0, + review: stats.review || 0, + completionPercentage: stats.total > 0 ? 
Math.round((stats.done / stats.total) * 100) : 0, + lastModified: (await fsPromises.stat(tasksPath)).mtime.toISOString() + }; + } catch (parseError) { + console.warn('Failed to parse tasks.json:', parseError.message); + taskMetadata = { error: 'Failed to parse tasks.json' }; + } + } + + return { + hasTaskmaster: true, + hasEssentialFiles, + files: fileStatus, + metadata: taskMetadata, + path: taskMasterPath + }; + + } catch (error) { + console.error('Error detecting TaskMaster folder:', error); + return { + hasTaskmaster: false, + reason: `Error checking directory: ${error.message}` + }; + } +} + +// MCP detection is now handled by the centralized utility + +// API Routes + +/** + * GET /api/taskmaster/installation-status + * Check if TaskMaster CLI is installed on the system + */ +router.get('/installation-status', async (req, res) => { + try { + const installationStatus = await checkTaskMasterInstallation(); + + // Also check for MCP server configuration + const mcpStatus = await detectTaskMasterMCPServer(); + + res.json({ + success: true, + installation: installationStatus, + mcpServer: mcpStatus, + isReady: installationStatus.isInstalled && mcpStatus.hasMCPServer + }); + } catch (error) { + console.error('Error checking TaskMaster installation:', error); + res.status(500).json({ + success: false, + error: 'Failed to check TaskMaster installation status', + installation: { + isInstalled: false, + reason: `Server error: ${error.message}` + }, + mcpServer: { + hasMCPServer: false, + reason: `Server error: ${error.message}` + }, + isReady: false + }); + } +}); + +/** + * GET /api/taskmaster/detect/:projectName + * Detect TaskMaster configuration for a specific project + */ +router.get('/detect/:projectName', async (req, res) => { + try { + const { projectName } = req.params; + + // Use the existing extractProjectDirectory function to get actual project path + let projectPath; + try { + projectPath = await extractProjectDirectory(projectName); + } catch (error) { + 
console.error('Error extracting project directory:', error); + return res.status(404).json({ + error: 'Project path not found', + projectName, + message: error.message + }); + } + + // Verify the project path exists + try { + await fsPromises.access(projectPath, fs.constants.R_OK); + } catch (error) { + return res.status(404).json({ + error: 'Project path not accessible', + projectPath, + projectName, + message: error.message + }); + } + + // Run detection in parallel + const [taskMasterResult, mcpResult] = await Promise.all([ + detectTaskMasterFolder(projectPath), + detectTaskMasterMCPServer() + ]); + + // Determine overall status + let status = 'not-configured'; + if (taskMasterResult.hasTaskmaster && taskMasterResult.hasEssentialFiles) { + if (mcpResult.hasMCPServer && mcpResult.isConfigured) { + status = 'fully-configured'; + } else { + status = 'taskmaster-only'; + } + } else if (mcpResult.hasMCPServer && mcpResult.isConfigured) { + status = 'mcp-only'; + } + + const responseData = { + projectName, + projectPath, + status, + taskmaster: taskMasterResult, + mcp: mcpResult, + timestamp: new Date().toISOString() + }; + + res.json(responseData); + + } catch (error) { + console.error('TaskMaster detection error:', error); + res.status(500).json({ + error: 'Failed to detect TaskMaster configuration', + message: error.message + }); + } +}); + +/** + * GET /api/taskmaster/detect-all + * Detect TaskMaster configuration for all known projects + * This endpoint works with the existing projects system + */ +router.get('/detect-all', async (req, res) => { + try { + // Import getProjects from the projects module + const { getProjects } = await import('../../../projects.js'); + const projects = await getProjects(); + + // Run detection for all projects in parallel + const detectionPromises = projects.map(async (project) => { + try { + // Use the project's fullPath if available, otherwise extract the directory + let projectPath; + if (project.fullPath) { + projectPath = 
project.fullPath; + } else { + try { + projectPath = await extractProjectDirectory(project.name); + } catch (error) { + throw new Error(`Failed to extract project directory: ${error.message}`); + } + } + + const [taskMasterResult, mcpResult] = await Promise.all([ + detectTaskMasterFolder(projectPath), + detectTaskMasterMCPServer() + ]); + + // Determine status + let status = 'not-configured'; + if (taskMasterResult.hasTaskmaster && taskMasterResult.hasEssentialFiles) { + if (mcpResult.hasMCPServer && mcpResult.isConfigured) { + status = 'fully-configured'; + } else { + status = 'taskmaster-only'; + } + } else if (mcpResult.hasMCPServer && mcpResult.isConfigured) { + status = 'mcp-only'; + } + + return { + projectName: project.name, + displayName: project.displayName, + projectPath, + status, + taskmaster: taskMasterResult, + mcp: mcpResult + }; + } catch (error) { + return { + projectName: project.name, + displayName: project.displayName, + status: 'error', + error: error.message + }; + } + }); + + const results = await Promise.all(detectionPromises); + + res.json({ + projects: results, + summary: { + total: results.length, + fullyConfigured: results.filter(p => p.status === 'fully-configured').length, + taskmasterOnly: results.filter(p => p.status === 'taskmaster-only').length, + mcpOnly: results.filter(p => p.status === 'mcp-only').length, + notConfigured: results.filter(p => p.status === 'not-configured').length, + errors: results.filter(p => p.status === 'error').length + }, + timestamp: new Date().toISOString() + }); + + } catch (error) { + console.error('Bulk TaskMaster detection error:', error); + res.status(500).json({ + error: 'Failed to detect TaskMaster configuration for projects', + message: error.message + }); + } +}); + +/** + * POST /api/taskmaster/initialize/:projectName + * Initialize TaskMaster in a project (placeholder for future CLI integration) + */ +router.post('/initialize/:projectName', async (req, res) => { + try { + const { projectName } = 
req.params; + const { rules } = req.body; // Optional rule profiles + + // This will be implemented in a later subtask with CLI integration + res.status(501).json({ + error: 'TaskMaster initialization not yet implemented', + message: 'This endpoint will execute task-master init via CLI in a future update', + projectName, + rules + }); + + } catch (error) { + console.error('TaskMaster initialization error:', error); + res.status(500).json({ + error: 'Failed to initialize TaskMaster', + message: error.message + }); + } +}); + +/** + * GET /api/taskmaster/next/:projectName + * Get the next recommended task using task-master CLI + */ +router.get('/next/:projectName', async (req, res) => { + try { + const { projectName } = req.params; + + // Get project path + let projectPath; + try { + projectPath = await extractProjectDirectory(projectName); + } catch (error) { + return res.status(404).json({ + error: 'Project not found', + message: `Project "${projectName}" does not exist` + }); + } + + // Try to execute task-master next command + try { + const { spawn } = await import('child_process'); + + const nextTaskCommand = spawn('task-master', ['next'], { + cwd: projectPath, + stdio: ['pipe', 'pipe', 'pipe'] + }); + + let stdout = ''; + let stderr = ''; + + nextTaskCommand.stdout.on('data', (data) => { + stdout += data.toString(); + }); + + nextTaskCommand.stderr.on('data', (data) => { + stderr += data.toString(); + }); + + await new Promise((resolve, reject) => { + nextTaskCommand.on('close', (code) => { + if (code === 0) { + resolve(); + } else { + reject(new Error(`task-master next failed with code ${code}: ${stderr}`)); + } + }); + + nextTaskCommand.on('error', (error) => { + reject(error); + }); + }); + + // Parse the output - task-master next usually returns JSON + let nextTaskData = null; + if (stdout.trim()) { + try { + nextTaskData = JSON.parse(stdout); + } catch (parseError) { + // If not JSON, treat as plain text + nextTaskData = { message: stdout.trim() }; + } + } 
+ + res.json({ + projectName, + projectPath, + nextTask: nextTaskData, + timestamp: new Date().toISOString() + }); + + } catch (cliError) { + console.warn('Failed to execute task-master CLI:', cliError.message); + + // Fallback to loading tasks and finding next one locally + // Use localhost to bypass proxy for internal server-to-server calls + const tasksResponse = await fetch(`http://localhost:${process.env.SERVER_PORT || process.env.PORT || '3001'}/api/taskmaster/tasks/${encodeURIComponent(projectName)}`, { + headers: { + 'Authorization': req.headers.authorization + } + }); + + if (tasksResponse.ok) { + const tasksData = await tasksResponse.json(); + const nextTask = tasksData.tasks?.find(task => + task.status === 'pending' || task.status === 'in-progress' + ) || null; + + res.json({ + projectName, + projectPath, + nextTask, + fallback: true, + message: 'Used fallback method (CLI not available)', + timestamp: new Date().toISOString() + }); + } else { + throw new Error('Failed to load tasks via fallback method'); + } + } + + } catch (error) { + console.error('TaskMaster next task error:', error); + res.status(500).json({ + error: 'Failed to get next task', + message: error.message + }); + } +}); + +/** + * GET /api/taskmaster/tasks/:projectName + * Load actual tasks from .taskmaster/tasks/tasks.json + */ +router.get('/tasks/:projectName', async (req, res) => { + try { + const { projectName } = req.params; + + // Get project path + let projectPath; + try { + projectPath = await extractProjectDirectory(projectName); + } catch (error) { + return res.status(404).json({ + error: 'Project not found', + message: `Project "${projectName}" does not exist` + }); + } + + const taskMasterPath = path.join(projectPath, '.taskmaster'); + const tasksFilePath = path.join(taskMasterPath, 'tasks', 'tasks.json'); + + // Check if tasks file exists + try { + await fsPromises.access(tasksFilePath); + } catch (error) { + return res.json({ + projectName, + tasks: [], + message: 'No 
tasks.json file found' + }); + } + + // Read and parse tasks file + try { + const tasksContent = await fsPromises.readFile(tasksFilePath, 'utf8'); + const tasksData = JSON.parse(tasksContent); + + let tasks = []; + let currentTag = 'master'; + + // Handle both tagged and legacy formats + if (Array.isArray(tasksData)) { + // Legacy format + tasks = tasksData; + } else if (tasksData.tasks) { + // Simple format with tasks array + tasks = tasksData.tasks; + } else { + // Tagged format - get tasks from current tag or master + if (tasksData[currentTag] && tasksData[currentTag].tasks) { + tasks = tasksData[currentTag].tasks; + } else if (tasksData.master && tasksData.master.tasks) { + tasks = tasksData.master.tasks; + } else { + // Get tasks from first available tag + const firstTag = Object.keys(tasksData).find(key => + tasksData[key].tasks && Array.isArray(tasksData[key].tasks) + ); + if (firstTag) { + tasks = tasksData[firstTag].tasks; + currentTag = firstTag; + } + } + } + + // Transform tasks to ensure all have required fields + const transformedTasks = tasks.map(task => ({ + id: task.id, + title: task.title || 'Untitled Task', + description: task.description || '', + status: task.status || 'pending', + priority: task.priority || 'medium', + dependencies: task.dependencies || [], + createdAt: task.createdAt || task.created || new Date().toISOString(), + updatedAt: task.updatedAt || task.updated || new Date().toISOString(), + details: task.details || '', + testStrategy: task.testStrategy || task.test_strategy || '', + subtasks: task.subtasks || [] + })); + + res.json({ + projectName, + projectPath, + tasks: transformedTasks, + currentTag, + totalTasks: transformedTasks.length, + tasksByStatus: { + pending: transformedTasks.filter(t => t.status === 'pending').length, + 'in-progress': transformedTasks.filter(t => t.status === 'in-progress').length, + done: transformedTasks.filter(t => t.status === 'done').length, + review: transformedTasks.filter(t => t.status === 
'review').length, + deferred: transformedTasks.filter(t => t.status === 'deferred').length, + cancelled: transformedTasks.filter(t => t.status === 'cancelled').length + }, + timestamp: new Date().toISOString() + }); + + } catch (parseError) { + console.error('Failed to parse tasks.json:', parseError); + return res.status(500).json({ + error: 'Failed to parse tasks file', + message: parseError.message + }); + } + + } catch (error) { + console.error('TaskMaster tasks loading error:', error); + res.status(500).json({ + error: 'Failed to load TaskMaster tasks', + message: error.message + }); + } +}); + +/** + * GET /api/taskmaster/prd/:projectName + * List all PRD files in the project's .taskmaster/docs directory + */ +router.get('/prd/:projectName', async (req, res) => { + try { + const { projectName } = req.params; + + // Get project path + let projectPath; + try { + projectPath = await extractProjectDirectory(projectName); + } catch (error) { + return res.status(404).json({ + error: 'Project not found', + message: `Project "${projectName}" does not exist` + }); + } + + const docsPath = path.join(projectPath, '.taskmaster', 'docs'); + + // Check if docs directory exists + try { + await fsPromises.access(docsPath, fs.constants.R_OK); + } catch (error) { + return res.json({ + projectName, + prdFiles: [], + message: 'No .taskmaster/docs directory found' + }); + } + + // Read directory and filter for PRD files + try { + const files = await fsPromises.readdir(docsPath); + const prdFiles = []; + + for (const file of files) { + const filePath = path.join(docsPath, file); + const stats = await fsPromises.stat(filePath); + + if (stats.isFile() && (file.endsWith('.txt') || file.endsWith('.md'))) { + prdFiles.push({ + name: file, + path: path.relative(projectPath, filePath), + size: stats.size, + modified: stats.mtime.toISOString(), + created: stats.birthtime.toISOString() + }); + } + } + + res.json({ + projectName, + projectPath, + prdFiles: prdFiles.sort((a, b) => new 
Date(b.modified) - new Date(a.modified)), + timestamp: new Date().toISOString() + }); + + } catch (readError) { + console.error('Error reading docs directory:', readError); + return res.status(500).json({ + error: 'Failed to read PRD files', + message: readError.message + }); + } + + } catch (error) { + console.error('PRD list error:', error); + res.status(500).json({ + error: 'Failed to list PRD files', + message: error.message + }); + } +}); + +/** + * POST /api/taskmaster/prd/:projectName + * Create or update a PRD file in the project's .taskmaster/docs directory + */ +router.post('/prd/:projectName', async (req, res) => { + try { + const { projectName } = req.params; + const { fileName, content } = req.body; + + if (!fileName || !content) { + return res.status(400).json({ + error: 'Missing required fields', + message: 'fileName and content are required' + }); + } + + // Validate filename + if (!fileName.match(/^[\w\-. ]+\.(txt|md)$/)) { + return res.status(400).json({ + error: 'Invalid filename', + message: 'Filename must end with .txt or .md and contain only alphanumeric characters, spaces, dots, and dashes' + }); + } + + // Get project path + let projectPath; + try { + projectPath = await extractProjectDirectory(projectName); + } catch (error) { + return res.status(404).json({ + error: 'Project not found', + message: `Project "${projectName}" does not exist` + }); + } + + const docsPath = path.join(projectPath, '.taskmaster', 'docs'); + const filePath = path.join(docsPath, fileName); + + // Ensure docs directory exists + try { + await fsPromises.mkdir(docsPath, { recursive: true }); + } catch (error) { + console.error('Failed to create docs directory:', error); + return res.status(500).json({ + error: 'Failed to create directory', + message: error.message + }); + } + + // Write the PRD file + try { + await fsPromises.writeFile(filePath, content, 'utf8'); + + // Get file stats + const stats = await fsPromises.stat(filePath); + + res.json({ + projectName, + 
projectPath, + fileName, + filePath: path.relative(projectPath, filePath), + size: stats.size, + created: stats.birthtime.toISOString(), + modified: stats.mtime.toISOString(), + message: 'PRD file saved successfully', + timestamp: new Date().toISOString() + }); + + } catch (writeError) { + console.error('Failed to write PRD file:', writeError); + return res.status(500).json({ + error: 'Failed to write PRD file', + message: writeError.message + }); + } + + } catch (error) { + console.error('PRD create/update error:', error); + res.status(500).json({ + error: 'Failed to create/update PRD file', + message: error.message + }); + } +}); + +/** + * GET /api/taskmaster/prd/:projectName/:fileName + * Get content of a specific PRD file + */ +router.get('/prd/:projectName/:fileName', async (req, res) => { + try { + const { projectName, fileName } = req.params; + + // Get project path + let projectPath; + try { + projectPath = await extractProjectDirectory(projectName); + } catch (error) { + return res.status(404).json({ + error: 'Project not found', + message: `Project "${projectName}" does not exist` + }); + } + + const filePath = path.join(projectPath, '.taskmaster', 'docs', fileName); + + // Check if file exists + try { + await fsPromises.access(filePath, fs.constants.R_OK); + } catch (error) { + return res.status(404).json({ + error: 'PRD file not found', + message: `File "${fileName}" does not exist` + }); + } + + // Read file content + try { + const content = await fsPromises.readFile(filePath, 'utf8'); + const stats = await fsPromises.stat(filePath); + + res.json({ + projectName, + projectPath, + fileName, + filePath: path.relative(projectPath, filePath), + content, + size: stats.size, + created: stats.birthtime.toISOString(), + modified: stats.mtime.toISOString(), + timestamp: new Date().toISOString() + }); + + } catch (readError) { + console.error('Failed to read PRD file:', readError); + return res.status(500).json({ + error: 'Failed to read PRD file', + message: 
readError.message + }); + } + + } catch (error) { + console.error('PRD read error:', error); + res.status(500).json({ + error: 'Failed to read PRD file', + message: error.message + }); + } +}); + +/** + * DELETE /api/taskmaster/prd/:projectName/:fileName + * Delete a specific PRD file + */ +router.delete('/prd/:projectName/:fileName', async (req, res) => { + try { + const { projectName, fileName } = req.params; + + // Get project path + let projectPath; + try { + projectPath = await extractProjectDirectory(projectName); + } catch (error) { + return res.status(404).json({ + error: 'Project not found', + message: `Project "${projectName}" does not exist` + }); + } + + const filePath = path.join(projectPath, '.taskmaster', 'docs', fileName); + + // Check if file exists + try { + await fsPromises.access(filePath, fs.constants.F_OK); + } catch (error) { + return res.status(404).json({ + error: 'PRD file not found', + message: `File "${fileName}" does not exist` + }); + } + + // Delete the file + try { + await fsPromises.unlink(filePath); + + res.json({ + projectName, + projectPath, + fileName, + message: 'PRD file deleted successfully', + timestamp: new Date().toISOString() + }); + + } catch (deleteError) { + console.error('Failed to delete PRD file:', deleteError); + return res.status(500).json({ + error: 'Failed to delete PRD file', + message: deleteError.message + }); + } + + } catch (error) { + console.error('PRD delete error:', error); + res.status(500).json({ + error: 'Failed to delete PRD file', + message: error.message + }); + } +}); + +/** + * POST /api/taskmaster/init/:projectName + * Initialize TaskMaster in a project + */ +router.post('/init/:projectName', async (req, res) => { + try { + const { projectName } = req.params; + + // Get project path + let projectPath; + try { + projectPath = await extractProjectDirectory(projectName); + } catch (error) { + return res.status(404).json({ + error: 'Project not found', + message: `Project "${projectName}" does not 
exist` + }); + } + + // Check if TaskMaster is already initialized + const taskMasterPath = path.join(projectPath, '.taskmaster'); + try { + await fsPromises.access(taskMasterPath, fs.constants.F_OK); + return res.status(400).json({ + error: 'TaskMaster already initialized', + message: 'TaskMaster is already configured for this project' + }); + } catch (error) { + // Directory doesn't exist, we can proceed + } + + // Run taskmaster init command + const initProcess = spawn('npx', ['task-master', 'init'], { + cwd: projectPath, + stdio: ['pipe', 'pipe', 'pipe'] + }); + + let stdout = ''; + let stderr = ''; + + initProcess.stdout.on('data', (data) => { + stdout += data.toString(); + }); + + initProcess.stderr.on('data', (data) => { + stderr += data.toString(); + }); + + initProcess.on('close', (code) => { + if (code === 0) { + // Broadcast TaskMaster project update via WebSocket + if (req.app.locals.wss) { + broadcastTaskMasterProjectUpdate( + req.app.locals.wss, + projectName, + { hasTaskmaster: true, status: 'initialized' } + ); + } + + res.json({ + projectName, + projectPath, + message: 'TaskMaster initialized successfully', + output: stdout, + timestamp: new Date().toISOString() + }); + } else { + console.error('TaskMaster init failed:', stderr); + res.status(500).json({ + error: 'Failed to initialize TaskMaster', + message: stderr || stdout, + code + }); + } + }); + + // Send 'yes' responses to automated prompts + initProcess.stdin.write('yes\n'); + initProcess.stdin.end(); + + } catch (error) { + console.error('TaskMaster init error:', error); + res.status(500).json({ + error: 'Failed to initialize TaskMaster', + message: error.message + }); + } +}); + +/** + * POST /api/taskmaster/add-task/:projectName + * Add a new task to the project + */ +router.post('/add-task/:projectName', async (req, res) => { + try { + const { projectName } = req.params; + const { prompt, title, description, priority = 'medium', dependencies } = req.body; + + if (!prompt && (!title || 
!description)) { + return res.status(400).json({ + error: 'Missing required parameters', + message: 'Either "prompt" or both "title" and "description" are required' + }); + } + + // Get project path + let projectPath; + try { + projectPath = await extractProjectDirectory(projectName); + } catch (error) { + return res.status(404).json({ + error: 'Project not found', + message: `Project "${projectName}" does not exist` + }); + } + + // Build the task-master add-task command + const args = ['task-master-ai', 'add-task']; + + if (prompt) { + args.push('--prompt', prompt); + args.push('--research'); // Use research for AI-generated tasks + } else { + args.push('--prompt', `Create a task titled "${title}" with description: ${description}`); + } + + if (priority) { + args.push('--priority', priority); + } + + if (dependencies) { + args.push('--dependencies', dependencies); + } + + // Run task-master add-task command + const addTaskProcess = spawn('npx', args, { + cwd: projectPath, + stdio: ['pipe', 'pipe', 'pipe'] + }); + + let stdout = ''; + let stderr = ''; + + addTaskProcess.stdout.on('data', (data) => { + stdout += data.toString(); + }); + + addTaskProcess.stderr.on('data', (data) => { + stderr += data.toString(); + }); + + addTaskProcess.on('close', (code) => { + console.log('Add task process completed with code:', code); + console.log('Stdout:', stdout); + console.log('Stderr:', stderr); + + if (code === 0) { + // Broadcast task update via WebSocket + if (req.app.locals.wss) { + broadcastTaskMasterTasksUpdate( + req.app.locals.wss, + projectName + ); + } + + res.json({ + projectName, + projectPath, + message: 'Task added successfully', + output: stdout, + timestamp: new Date().toISOString() + }); + } else { + console.error('Add task failed:', stderr); + res.status(500).json({ + error: 'Failed to add task', + message: stderr || stdout, + code + }); + } + }); + + addTaskProcess.stdin.end(); + + } catch (error) { + console.error('Add task error:', error); + 
res.status(500).json({ + error: 'Failed to add task', + message: error.message + }); + } +}); + +/** + * PUT /api/taskmaster/update-task/:projectName/:taskId + * Update a specific task using TaskMaster CLI + */ +router.put('/update-task/:projectName/:taskId', async (req, res) => { + try { + const { projectName, taskId } = req.params; + const { title, description, status, priority, details } = req.body; + + // Get project path + let projectPath; + try { + projectPath = await extractProjectDirectory(projectName); + } catch (error) { + return res.status(404).json({ + error: 'Project not found', + message: `Project "${projectName}" does not exist` + }); + } + + // If only updating status, use set-status command + if (status && Object.keys(req.body).length === 1) { + const setStatusProcess = spawn('npx', ['task-master-ai', 'set-status', `--id=${taskId}`, `--status=${status}`], { + cwd: projectPath, + stdio: ['pipe', 'pipe', 'pipe'] + }); + + let stdout = ''; + let stderr = ''; + + setStatusProcess.stdout.on('data', (data) => { + stdout += data.toString(); + }); + + setStatusProcess.stderr.on('data', (data) => { + stderr += data.toString(); + }); + + setStatusProcess.on('close', (code) => { + if (code === 0) { + // Broadcast task update via WebSocket + if (req.app.locals.wss) { + broadcastTaskMasterTasksUpdate(req.app.locals.wss, projectName); + } + + res.json({ + projectName, + projectPath, + taskId, + message: 'Task status updated successfully', + output: stdout, + timestamp: new Date().toISOString() + }); + } else { + console.error('Set task status failed:', stderr); + res.status(500).json({ + error: 'Failed to update task status', + message: stderr || stdout, + code + }); + } + }); + + setStatusProcess.stdin.end(); + } else { + // For other updates, use update-task command with a prompt describing the changes + const updates = []; + if (title) updates.push(`title: "${title}"`); + if (description) updates.push(`description: "${description}"`); + if (priority) 
updates.push(`priority: "${priority}"`); + if (details) updates.push(`details: "${details}"`); + + const prompt = `Update task with the following changes: ${updates.join(', ')}`; + + const updateProcess = spawn('npx', ['task-master-ai', 'update-task', `--id=${taskId}`, `--prompt=${prompt}`], { + cwd: projectPath, + stdio: ['pipe', 'pipe', 'pipe'] + }); + + let stdout = ''; + let stderr = ''; + + updateProcess.stdout.on('data', (data) => { + stdout += data.toString(); + }); + + updateProcess.stderr.on('data', (data) => { + stderr += data.toString(); + }); + + updateProcess.on('close', (code) => { + if (code === 0) { + // Broadcast task update via WebSocket + if (req.app.locals.wss) { + broadcastTaskMasterTasksUpdate(req.app.locals.wss, projectName); + } + + res.json({ + projectName, + projectPath, + taskId, + message: 'Task updated successfully', + output: stdout, + timestamp: new Date().toISOString() + }); + } else { + console.error('Update task failed:', stderr); + res.status(500).json({ + error: 'Failed to update task', + message: stderr || stdout, + code + }); + } + }); + + updateProcess.stdin.end(); + } + + } catch (error) { + console.error('Update task error:', error); + res.status(500).json({ + error: 'Failed to update task', + message: error.message + }); + } +}); + +/** + * POST /api/taskmaster/parse-prd/:projectName + * Parse a PRD file to generate tasks + */ +router.post('/parse-prd/:projectName', async (req, res) => { + try { + const { projectName } = req.params; + const { fileName = 'prd.txt', numTasks, append = false } = req.body; + + // Get project path + let projectPath; + try { + projectPath = await extractProjectDirectory(projectName); + } catch (error) { + return res.status(404).json({ + error: 'Project not found', + message: `Project "${projectName}" does not exist` + }); + } + + const prdPath = path.join(projectPath, '.taskmaster', 'docs', fileName); + + // Check if PRD file exists + try { + await fsPromises.access(prdPath, fs.constants.F_OK); + 
} catch (error) { + return res.status(404).json({ + error: 'PRD file not found', + message: `File "${fileName}" does not exist in .taskmaster/docs/` + }); + } + + // Build the command args + const args = ['task-master-ai', 'parse-prd', prdPath]; + + if (numTasks) { + args.push('--num-tasks', numTasks.toString()); + } + + if (append) { + args.push('--append'); + } + + args.push('--research'); // Use research for better PRD parsing + + // Run task-master parse-prd command + const parsePRDProcess = spawn('npx', args, { + cwd: projectPath, + stdio: ['pipe', 'pipe', 'pipe'] + }); + + let stdout = ''; + let stderr = ''; + + parsePRDProcess.stdout.on('data', (data) => { + stdout += data.toString(); + }); + + parsePRDProcess.stderr.on('data', (data) => { + stderr += data.toString(); + }); + + parsePRDProcess.on('close', (code) => { + if (code === 0) { + // Broadcast task update via WebSocket + if (req.app.locals.wss) { + broadcastTaskMasterTasksUpdate( + req.app.locals.wss, + projectName + ); + } + + res.json({ + projectName, + projectPath, + prdFile: fileName, + message: 'PRD parsed and tasks generated successfully', + output: stdout, + timestamp: new Date().toISOString() + }); + } else { + console.error('Parse PRD failed:', stderr); + res.status(500).json({ + error: 'Failed to parse PRD', + message: stderr || stdout, + code + }); + } + }); + + parsePRDProcess.stdin.end(); + + } catch (error) { + console.error('Parse PRD error:', error); + res.status(500).json({ + error: 'Failed to parse PRD', + message: error.message + }); + } +}); + +/** + * GET /api/taskmaster/prd-templates + * Get available PRD templates + */ +router.get('/prd-templates', async (req, res) => { + try { + // Return built-in templates + const templates = [ + { + id: 'web-app', + name: 'Web Application', + description: 'Template for web application projects with frontend and backend components', + category: 'web', + content: `# Product Requirements Document - Web Application + +## Overview +**Product 
Name:** [Your App Name] +**Version:** 1.0 +**Date:** ${new Date().toISOString().split('T')[0]} +**Author:** [Your Name] + +## Executive Summary +Brief description of what this web application will do and why it's needed. + +## Product Goals +- Goal 1: [Specific measurable goal] +- Goal 2: [Specific measurable goal] +- Goal 3: [Specific measurable goal] + +## User Stories +### Core Features +1. **User Registration & Authentication** + - As a user, I want to create an account so I can access personalized features + - As a user, I want to log in securely so my data is protected + - As a user, I want to reset my password if I forget it + +2. **Main Application Features** + - As a user, I want to [core feature 1] so I can [benefit] + - As a user, I want to [core feature 2] so I can [benefit] + - As a user, I want to [core feature 3] so I can [benefit] + +3. **User Interface** + - As a user, I want a responsive design so I can use the app on any device + - As a user, I want intuitive navigation so I can easily find features + +## Technical Requirements +### Frontend +- Framework: React/Vue/Angular or vanilla JavaScript +- Styling: CSS framework (Tailwind, Bootstrap, etc.) 
+- State Management: Redux/Vuex/Context API +- Build Tools: Webpack/Vite +- Testing: Jest/Vitest for unit tests + +### Backend +- Runtime: Node.js/Python/Java +- Database: PostgreSQL/MySQL/MongoDB +- API: RESTful API or GraphQL +- Authentication: JWT tokens +- Testing: Integration and unit tests + +### Infrastructure +- Hosting: Cloud provider (AWS, Azure, GCP) +- CI/CD: GitHub Actions/GitLab CI +- Monitoring: Application monitoring tools +- Security: HTTPS, input validation, rate limiting + +## Success Metrics +- User engagement metrics +- Performance benchmarks (load time < 2s) +- Error rates < 1% +- User satisfaction scores + +## Timeline +- Phase 1: Core functionality (4-6 weeks) +- Phase 2: Advanced features (2-4 weeks) +- Phase 3: Polish and launch (2 weeks) + +## Constraints & Assumptions +- Budget constraints +- Technical limitations +- Team size and expertise +- Timeline constraints` + }, + { + id: 'api', + name: 'REST API', + description: 'Template for REST API development projects', + category: 'backend', + content: `# Product Requirements Document - REST API + +## Overview +**API Name:** [Your API Name] +**Version:** v1.0 +**Date:** ${new Date().toISOString().split('T')[0]} +**Author:** [Your Name] + +## Executive Summary +Description of the API's purpose, target users, and primary use cases. + +## API Goals +- Goal 1: Provide secure data access +- Goal 2: Ensure scalable architecture +- Goal 3: Maintain high availability (99.9% uptime) + +## Functional Requirements +### Core Endpoints +1. **Authentication Endpoints** + - POST /api/auth/login - User authentication + - POST /api/auth/logout - User logout + - POST /api/auth/refresh - Token refresh + - POST /api/auth/register - User registration + +2. 
**Data Management Endpoints** + - GET /api/resources - List resources with pagination + - GET /api/resources/{id} - Get specific resource + - POST /api/resources - Create new resource + - PUT /api/resources/{id} - Update existing resource + - DELETE /api/resources/{id} - Delete resource + +3. **Administrative Endpoints** + - GET /api/admin/users - Manage users (admin only) + - GET /api/admin/analytics - System analytics + - POST /api/admin/backup - Trigger system backup + +## Technical Requirements +### API Design +- RESTful architecture following OpenAPI 3.0 specification +- JSON request/response format +- Consistent error response format +- API versioning strategy + +### Authentication & Security +- JWT token-based authentication +- Role-based access control (RBAC) +- Rate limiting (100 requests/minute per user) +- Input validation and sanitization +- HTTPS enforcement + +### Database +- Database type: [PostgreSQL/MongoDB/MySQL] +- Connection pooling +- Database migrations +- Backup and recovery procedures + +### Performance Requirements +- Response time: < 200ms for 95% of requests +- Throughput: 1000+ requests/second +- Concurrent users: 10,000+ +- Database query optimization + +### Documentation +- Auto-generated API documentation (Swagger/OpenAPI) +- Code examples for common use cases +- SDK development for major languages +- Postman collection for testing + +## Error Handling +- Standardized error codes and messages +- Proper HTTP status codes +- Detailed error logging +- Graceful degradation strategies + +## Testing Strategy +- Unit tests (80%+ coverage) +- Integration tests for all endpoints +- Load testing and performance testing +- Security testing (OWASP compliance) + +## Monitoring & Logging +- Application performance monitoring +- Error tracking and alerting +- Access logs and audit trails +- Health check endpoints + +## Deployment +- Containerized deployment (Docker) +- CI/CD pipeline setup +- Environment management (dev, staging, prod) +- Blue-green 
deployment strategy + +## Success Metrics +- API uptime > 99.9% +- Average response time < 200ms +- Zero critical security vulnerabilities +- Developer adoption metrics` + }, + { + id: 'mobile-app', + name: 'Mobile Application', + description: 'Template for mobile app development projects (iOS/Android)', + category: 'mobile', + content: `# Product Requirements Document - Mobile Application + +## Overview +**App Name:** [Your App Name] +**Platform:** iOS / Android / Cross-platform +**Version:** 1.0 +**Date:** ${new Date().toISOString().split('T')[0]} +**Author:** [Your Name] + +## Executive Summary +Brief description of the mobile app's purpose, target audience, and key value proposition. + +## Product Goals +- Goal 1: [Specific user engagement goal] +- Goal 2: [Specific functionality goal] +- Goal 3: [Specific performance goal] + +## User Stories +### Core Features +1. **Onboarding & Authentication** + - As a new user, I want a simple onboarding process + - As a user, I want to sign up with email or social media + - As a user, I want biometric authentication for security + +2. **Main App Features** + - As a user, I want [core feature 1] accessible from home screen + - As a user, I want [core feature 2] to work offline + - As a user, I want to sync data across devices + +3. 
**User Experience** + - As a user, I want intuitive navigation patterns + - As a user, I want fast loading times + - As a user, I want accessibility features + +## Technical Requirements +### Mobile Development +- **Cross-platform:** React Native / Flutter / Xamarin +- **Native:** Swift (iOS) / Kotlin (Android) +- **State Management:** Redux / MobX / Provider +- **Navigation:** React Navigation / Flutter Navigation + +### Backend Integration +- REST API or GraphQL integration +- Real-time features (WebSockets/Push notifications) +- Offline data synchronization +- Background processing + +### Device Features +- Camera and photo library access +- GPS location services +- Push notifications +- Biometric authentication +- Device storage + +### Performance Requirements +- App launch time < 3 seconds +- Screen transition animations < 300ms +- Memory usage optimization +- Battery usage optimization + +## Platform-Specific Considerations +### iOS Requirements +- iOS 13.0+ minimum version +- App Store guidelines compliance +- iOS design guidelines (Human Interface Guidelines) +- TestFlight beta testing + +### Android Requirements +- Android 8.0+ (API level 26) minimum +- Google Play Store guidelines +- Material Design guidelines +- Google Play Console testing + +## User Interface Design +- Responsive design for different screen sizes +- Dark mode support +- Accessibility compliance (WCAG 2.1) +- Consistent design system + +## Security & Privacy +- Secure data storage (Keychain/Keystore) +- API communication encryption +- Privacy policy compliance (GDPR/CCPA) +- App security best practices + +## Testing Strategy +- Unit testing (80%+ coverage) +- UI/E2E testing (Detox/Appium) +- Device testing on multiple screen sizes +- Performance testing +- Security testing + +## App Store Deployment +- App store optimization (ASO) +- App icons and screenshots +- Store listing content +- Release management strategy + +## Analytics & Monitoring +- User analytics (Firebase/Analytics) +- 
Crash reporting (Crashlytics/Sentry) +- Performance monitoring +- User feedback collection + +## Success Metrics +- App store ratings > 4.0 +- User retention rates +- Daily/Monthly active users +- App performance metrics +- Conversion rates` + }, + { + id: 'data-analysis', + name: 'Data Analysis Project', + description: 'Template for data analysis and visualization projects', + category: 'data', + content: `# Product Requirements Document - Data Analysis Project + +## Overview +**Project Name:** [Your Analysis Project] +**Analysis Type:** [Descriptive/Predictive/Prescriptive] +**Date:** ${new Date().toISOString().split('T')[0]} +**Author:** [Your Name] + +## Executive Summary +Description of the business problem, data sources, and expected insights. + +## Project Goals +- Goal 1: [Specific business question to answer] +- Goal 2: [Specific prediction to make] +- Goal 3: [Specific recommendation to provide] + +## Business Requirements +### Key Questions +1. What patterns exist in the current data? +2. What factors influence [target variable]? +3. What predictions can be made for [future outcome]? +4. What recommendations can improve [business metric]? + +### Success Criteria +- Actionable insights for stakeholders +- Statistical significance in findings +- Reproducible analysis pipeline +- Clear visualization and reporting + +## Data Requirements +### Data Sources +1. **Primary Data** + - Source: [Database/API/Files] + - Format: [CSV/JSON/SQL] + - Size: [Volume estimate] + - Update frequency: [Real-time/Daily/Monthly] + +2. 
**External Data** + - Third-party APIs + - Public datasets + - Market research data + +### Data Quality Requirements +- Data completeness (< 5% missing values) +- Data accuracy validation +- Data consistency checks +- Historical data availability + +## Technical Requirements +### Data Pipeline +- Data extraction and ingestion +- Data cleaning and preprocessing +- Data transformation and feature engineering +- Data validation and quality checks + +### Analysis Tools +- **Programming:** Python/R/SQL +- **Libraries:** pandas, numpy, scikit-learn, matplotlib +- **Visualization:** Tableau, PowerBI, or custom dashboards +- **Version Control:** Git for code and DVC for data + +### Computing Resources +- Local development environment +- Cloud computing (AWS/GCP/Azure) if needed +- Database access and permissions +- Storage requirements + +## Analysis Methodology +### Data Exploration +1. Descriptive statistics and data profiling +2. Data visualization and pattern identification +3. Correlation analysis +4. Outlier detection and handling + +### Statistical Analysis +1. Hypothesis formulation +2. Statistical testing +3. Confidence intervals +4. Effect size calculations + +### Machine Learning (if applicable) +1. Feature selection and engineering +2. Model selection and training +3. Cross-validation and evaluation +4. 
Model interpretation and explainability + +## Deliverables +### Reports +- Executive summary for stakeholders +- Technical analysis report +- Data quality report +- Methodology documentation + +### Visualizations +- Interactive dashboards +- Static charts and graphs +- Data story presentations +- Key findings infographics + +### Code & Documentation +- Reproducible analysis scripts +- Data pipeline code +- Documentation and comments +- Testing and validation code + +## Timeline +- Phase 1: Data collection and exploration (2 weeks) +- Phase 2: Analysis and modeling (3 weeks) +- Phase 3: Reporting and visualization (1 week) +- Phase 4: Stakeholder presentation (1 week) + +## Risks & Assumptions +- Data availability and quality risks +- Technical complexity assumptions +- Resource and timeline constraints +- Stakeholder engagement assumptions + +## Success Metrics +- Stakeholder satisfaction with insights +- Accuracy of predictions (if applicable) +- Business impact of recommendations +- Reproducibility of results` + } + ]; + + res.json({ + templates, + timestamp: new Date().toISOString() + }); + + } catch (error) { + console.error('PRD templates error:', error); + res.status(500).json({ + error: 'Failed to get PRD templates', + message: error.message + }); + } +}); + +/** + * POST /api/taskmaster/apply-template/:projectName + * Apply a PRD template to create a new PRD file + */ +router.post('/apply-template/:projectName', async (req, res) => { + try { + const { projectName } = req.params; + const { templateId, fileName = 'prd.txt', customizations = {} } = req.body; + + if (!templateId) { + return res.status(400).json({ + error: 'Missing required parameter', + message: 'templateId is required' + }); + } + + // Get project path + let projectPath; + try { + projectPath = await extractProjectDirectory(projectName); + } catch (error) { + return res.status(404).json({ + error: 'Project not found', + message: `Project "${projectName}" does not exist` + }); + } + + // Get 
the template content (this would normally fetch from the templates list) + const templates = await getAvailableTemplates(); + const template = templates.find(t => t.id === templateId); + + if (!template) { + return res.status(404).json({ + error: 'Template not found', + message: `Template "${templateId}" does not exist` + }); + } + + // Apply customizations to template content + let content = template.content; + + // Replace placeholders with customizations + for (const [key, value] of Object.entries(customizations)) { + const placeholder = `[${key}]`; + content = content.replace(new RegExp(placeholder.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'), 'g'), value); + } + + // Ensure .taskmaster/docs directory exists + const docsDir = path.join(projectPath, '.taskmaster', 'docs'); + try { + await fsPromises.mkdir(docsDir, { recursive: true }); + } catch (error) { + console.error('Failed to create docs directory:', error); + } + + const filePath = path.join(docsDir, fileName); + + // Write the template content to the file + try { + await fsPromises.writeFile(filePath, content, 'utf8'); + + res.json({ + projectName, + projectPath, + templateId, + templateName: template.name, + fileName, + filePath: filePath, + message: 'PRD template applied successfully', + timestamp: new Date().toISOString() + }); + + } catch (writeError) { + console.error('Failed to write PRD template:', writeError); + return res.status(500).json({ + error: 'Failed to write PRD template', + message: writeError.message + }); + } + + } catch (error) { + console.error('Apply template error:', error); + res.status(500).json({ + error: 'Failed to apply PRD template', + message: error.message + }); + } +}); + +// Helper function to get available templates +async function getAvailableTemplates() { + // This could be extended to read from files or database + return [ + { + id: 'web-app', + name: 'Web Application', + description: 'Template for web application projects', + category: 'web', + content: `# Product
Requirements Document - Web Application + +## Overview +**Product Name:** [Your App Name] +**Version:** 1.0 +**Date:** ${new Date().toISOString().split('T')[0]} +**Author:** [Your Name] + +## Executive Summary +Brief description of what this web application will do and why it's needed. + +## User Stories +1. As a user, I want [feature] so I can [benefit] +2. As a user, I want [feature] so I can [benefit] +3. As a user, I want [feature] so I can [benefit] + +## Technical Requirements +- Frontend framework +- Backend services +- Database requirements +- Security considerations + +## Success Metrics +- User engagement metrics +- Performance benchmarks +- Business objectives` + }, + // Add other templates here if needed + ]; +} + +export default router; diff --git a/server/src/runner.ts b/server/src/runner.ts index 63253a67..3404638f 100644 --- a/server/src/runner.ts +++ b/server/src/runner.ts @@ -13,7 +13,7 @@ import { getConnectableHost } from '@/shared/utils/networkHosts.js'; import { logger } from '@/shared/utils/logger.js'; import { authRoutes } from '@/modules/auth/auth.routes.js'; import { userRoutes } from '@/modules/user/user.routes.js'; -import { authenticateToken } from '@/modules/auth/auth.middleware.js'; +import { validateApiKey, authenticateToken } from '@/modules/auth/auth.middleware.js'; const __filename = fileURLToPath(import.meta.url); const __dirname = dirname(__filename); @@ -33,6 +33,52 @@ const HOST = process.env.HOST || '0.0.0.0'; const DISPLAY_HOST = getConnectableHost(HOST); const VITE_PORT = process.env.VITE_PORT || 5173; +async function importRoute(relativePath: string): Promise<any> { + const moduleUrl = new URL(relativePath, import.meta.url); + const routeModule = await import(moduleUrl.href); + return routeModule.default; +} + +const [ + gitRoutes, + mcpRoutes, + cursorRoutes, + taskmasterRoutes, + mcpUtilsRoutes, + commandsRoutes, + settingsRoutes, + apiKeysRoutes, + credentialsRoutes, + notificationPreferencesRoutes, + pushSubRoutes, +
agentRoutes, + projectsRoutes, + cliAuthRoutes, + codexRoutes, + geminiRoutes, + pluginsRoutes, + messagesRoutes, +] = await Promise.all([ + importRoute('./modules/git/git.routes.js'), + importRoute('./modules/mcp/mcp.routes.js'), + importRoute('./modules/cursor/cursor.routes.js'), + importRoute('./modules/taskmaster/taskmaster.routes.js'), + importRoute('./modules/mcp-utils/mcp-utils.routes.js'), + importRoute('./modules/commands/commands.routes.js'), + importRoute('./modules/settings/settings.routes.js'), + importRoute('./modules/api-keys/api-keys.routes.js'), + importRoute('./modules/credentials/credentials.routes.js'), + importRoute('./modules/notification-preferences/notification-preferences.routes.js'), + importRoute('./modules/push-sub/push-sub.routes.js'), + importRoute('./modules/agent/agent.routes.js'), + importRoute('./modules/projects/projects.routes.js'), + importRoute('./modules/cli-auth/cli-auth.routes.js'), + importRoute('./modules/codex/codex.routes.js'), + importRoute('./modules/gemini/gemini.routes.js'), + importRoute('./modules/plugins/plugins.routes.js'), + importRoute('./modules/messages/messages.routes.js'), +]); + // ---------- MIDDLEWARES ---------------- app.use(cors({ exposedHeaders: ['X-Refreshed-Token'] })); app.use(express.json({ @@ -60,12 +106,63 @@ app.use((req, res, next) => { }); +// Optional API key validation (if configured) +app.use('/api', validateApiKey); + // Authentication routes (public) app.use('/api/auth', authRoutes); +// Projects API Routes (protected) +app.use('/api/projects', authenticateToken, projectsRoutes); + +// Git API Routes (protected) +app.use('/api/git', authenticateToken, gitRoutes); + +// MCP API Routes (protected) +app.use('/api/mcp', authenticateToken, mcpRoutes); + +// Cursor API Routes (protected) +app.use('/api/cursor', authenticateToken, cursorRoutes); + +// TaskMaster API Routes (protected) +app.use('/api/taskmaster', authenticateToken, taskmasterRoutes); + +// MCP utilities 
+app.use('/api/mcp-utils', authenticateToken, mcpUtilsRoutes); + +// Commands API Routes (protected) +app.use('/api/commands', authenticateToken, commandsRoutes); + +// Settings API Routes (protected, legacy endpoint) +app.use('/api/settings', authenticateToken, settingsRoutes); + +// Settings sub-modules API Routes (protected) +app.use('/api/api-keys', authenticateToken, apiKeysRoutes); +app.use('/api/credentials', authenticateToken, credentialsRoutes); +app.use('/api/notification-preferences', authenticateToken, notificationPreferencesRoutes); +app.use('/api/push-sub', authenticateToken, pushSubRoutes); + +// CLI Authentication API Routes (protected) +app.use('/api/cli', authenticateToken, cliAuthRoutes); + // User API Routes (protected) app.use('/api/user', authenticateToken, userRoutes); +// Codex API Routes (protected) +app.use('/api/codex', authenticateToken, codexRoutes); + +// Gemini API Routes (protected) +app.use('/api/gemini', authenticateToken, geminiRoutes); + +// Plugins API Routes (protected) +app.use('/api/plugins', authenticateToken, pluginsRoutes); + +// Unified session messages route (protected) +app.use('/api/sessions', authenticateToken, messagesRoutes); + +// Agent API Routes (uses API key authentication) +app.use('/api/agent', agentRoutes); + // This matches files found in the root public folder (like api-docs.html when we run `/api-docs.html`). // If the file is found, it's automatically sent. If it is not, it passes it to the next route checker. // This will run in production as well as development URLs. 
@@ -145,4 +242,4 @@ async function main() { } } -await main(); \ No newline at end of file +await main(); diff --git a/src/App.tsx b/src/App.tsx index 2aeec6f4..986f3f7c 100644 --- a/src/App.tsx +++ b/src/App.tsx @@ -66,7 +66,6 @@ export default function App() { // // } /> // } /> -// } /> // // // diff --git a/src/components/project-creation-wizard/data/workspaceApi.ts b/src/components/project-creation-wizard/data/workspaceApi.ts index f4ca3baf..5ffbe94a 100644 --- a/src/components/project-creation-wizard/data/workspaceApi.ts +++ b/src/components/project-creation-wizard/data/workspaceApi.ts @@ -28,7 +28,7 @@ const parseJson = async (response: Response): Promise => { }; export const fetchGithubTokenCredentials = async () => { - const response = await api.get('/settings/credentials?type=github_token'); + const response = await api.get('/credentials?type=github_token'); const data = await parseJson(response); if (!response.ok) { diff --git a/src/components/settings/hooks/useCredentialsSettings.ts b/src/components/settings/hooks/useCredentialsSettings.ts index 4a9afc45..98acfdad 100644 --- a/src/components/settings/hooks/useCredentialsSettings.ts +++ b/src/components/settings/hooks/useCredentialsSettings.ts @@ -43,8 +43,8 @@ export function useCredentialsSettings({ setLoading(true); const [apiKeysResponse, credentialsResponse] = await Promise.all([ - authenticatedFetch('/api/settings/api-keys'), - authenticatedFetch('/api/settings/credentials?type=github_token'), + authenticatedFetch('/api/api-keys'), + authenticatedFetch('/api/credentials?type=github_token'), ]); const [apiKeysPayload, credentialsPayload] = await Promise.all([ @@ -67,7 +67,7 @@ export function useCredentialsSettings({ } try { - const response = await authenticatedFetch('/api/settings/api-keys', { + const response = await authenticatedFetch('/api/api-keys', { method: 'POST', body: JSON.stringify({ keyName: newKeyName.trim() }), }); @@ -95,7 +95,7 @@ export function useCredentialsSettings({ } try { - 
const response = await authenticatedFetch(`/api/settings/api-keys/${keyId}`, { + const response = await authenticatedFetch(`/api/api-keys/${keyId}`, { method: 'DELETE', }); @@ -113,7 +113,7 @@ export function useCredentialsSettings({ const toggleApiKey = useCallback(async (keyId: string, isActive: boolean) => { try { - const response = await authenticatedFetch(`/api/settings/api-keys/${keyId}/toggle`, { + const response = await authenticatedFetch(`/api/api-keys/${keyId}/toggle`, { method: 'PATCH', body: JSON.stringify({ isActive: !isActive }), }); @@ -136,7 +136,7 @@ export function useCredentialsSettings({ } try { - const response = await authenticatedFetch('/api/settings/credentials', { + const response = await authenticatedFetch('/api/credentials', { method: 'POST', body: JSON.stringify({ credentialName: newGithubName.trim(), @@ -169,7 +169,7 @@ export function useCredentialsSettings({ } try { - const response = await authenticatedFetch(`/api/settings/credentials/${credentialId}`, { + const response = await authenticatedFetch(`/api/credentials/${credentialId}`, { method: 'DELETE', }); @@ -187,7 +187,7 @@ export function useCredentialsSettings({ const toggleGithubCredential = useCallback(async (credentialId: string, isActive: boolean) => { try { - const response = await authenticatedFetch(`/api/settings/credentials/${credentialId}/toggle`, { + const response = await authenticatedFetch(`/api/credentials/${credentialId}/toggle`, { method: 'PATCH', body: JSON.stringify({ isActive: !isActive }), }); diff --git a/src/components/settings/hooks/useSettingsController.ts b/src/components/settings/hooks/useSettingsController.ts index 293cbceb..26e3828b 100644 --- a/src/components/settings/hooks/useSettingsController.ts +++ b/src/components/settings/hooks/useSettingsController.ts @@ -692,7 +692,7 @@ export function useSettingsController({ isOpen, initialTab, projects, onClose }: setGeminiPermissionMode(savedGeminiSettings.permissionMode || 'default'); try { - const 
notificationResponse = await authenticatedFetch('/api/settings/notification-preferences'); + const notificationResponse = await authenticatedFetch('/api/notification-preferences'); if (notificationResponse.ok) { const notificationData = await toResponseJson(notificationResponse); if (notificationData.success && notificationData.preferences) { @@ -767,7 +767,7 @@ export function useSettingsController({ isOpen, initialTab, projects, onClose }: lastUpdated: now, })); - const notificationResponse = await authenticatedFetch('/api/settings/notification-preferences', { + const notificationResponse = await authenticatedFetch('/api/notification-preferences', { method: 'PUT', body: JSON.stringify(notificationPreferences), }); diff --git a/src/hooks/useWebPush.ts b/src/hooks/useWebPush.ts index b5e365ae..b0b84770 100644 --- a/src/hooks/useWebPush.ts +++ b/src/hooks/useWebPush.ts @@ -52,7 +52,7 @@ export function useWebPush(): WebPushState { setPermission(perm); if (perm !== 'granted') return; - const keyRes = await authenticatedFetch('/api/settings/push/vapid-public-key'); + const keyRes = await authenticatedFetch('/api/push-sub/vapid-public-key'); const { publicKey } = await keyRes.json(); const registration = await navigator.serviceWorker.ready; @@ -62,7 +62,7 @@ export function useWebPush(): WebPushState { }); const subJson = subscription.toJSON(); - await authenticatedFetch('/api/settings/push/subscribe', { + await authenticatedFetch('/api/push-sub/subscribe', { method: 'POST', body: JSON.stringify({ endpoint: subJson.endpoint, @@ -86,7 +86,7 @@ export function useWebPush(): WebPushState { if (subscription) { const endpoint = subscription.endpoint; await subscription.unsubscribe(); - await authenticatedFetch('/api/settings/push/unsubscribe', { + await authenticatedFetch('/api/push-sub/unsubscribe', { method: 'POST', body: JSON.stringify({ endpoint }), });