mirror of
https://github.com/siteboon/claudecodeui.git
synced 2026-02-28 11:37:39 +00:00
feat: Google's gemini-cli integration (#422)
* feat: integrate Gemini AI agent provider - Core Backend: Ported gemini-cli.js and gemini-response-handler.js to establish the CLI bridge. Registered 'gemini' as an active provider within index.js. - Core Frontend: Extended QuickSettingsPanel.jsx, Settings.jsx, and AgentListItem.jsx to render the Gemini provider option, models (gemini-pro, gemini-flash, etc.), and handle OAuth states. - WebSocket Pipeline: Added support for gemini-command executions in backend and payload processing of gemini-response and gemini-error streams in useChatRealtimeHandlers.ts. Resolved JSON double-stringification and sessionId stripping issues in the transmission handler. - Platform Compatibility: Added scripts/fix-node-pty.js postinstall script and modified posix_spawnp calls with sh -c wrapper to prevent ENOEXEC and MacOS permission errors when spawning the gemini headless binary. - UX & Design: Imported official Google Gemini branding via GeminiLogo.jsx and gemini-ai-icon.svg. Updated translations (chat.json) for en, zh-CN, and ko locales. * fix: propagate gemini permission mode from settings to cli - Added Gemini Permissions UI in Settings to toggle Auto Edit and YOLO modes - Synced gemini permission mode to localStorage - Passed permissionMode in useChatComposerState for Gemini commands - Mapped frontend permission modes to --yolo and --approval-mode options in gemini-cli.js * feat(gemini): Refactor Gemini CLI integration to use stream-json - Replaced regex buffering text-system with NDJSON stream parsing - Added fallback for restricted models like gemini-3.1-pro-preview * feat(gemini): Render tool_use and tool_result UI bubbles - Forwarded gemini tool NDJSON objects to the websocket - Added React state handlers in useChatRealtimeHandlers to match Claude's tool UI behavior * feat(gemini): Add native session resumption and UI token tracking - Captured cliSessionId from init events to map ClaudeCodeUI's chat sessionId directly into Gemini's internal session manager. 
- Updated gemini-cli.js spawn arguments to append the --resume proxy flag instead of naively dumping the accumulated chat history into the command prompt. - Handled result stream objects by proxying total_tokens back into the frontend's claude-status tracker to natively populate the UI label. - Eliminated gemini-3 model proxy filter entirely. * fix(gemini): Fix static 'Claude' name rendering in chat UI header - Added "gemini": "Gemini" translation strings to messageTypes across English, Korean, and Chinese loc dictionaries. - Updated AssistantThinkingIndicator and MessageComponent ternary checks to identify provider === 'gemini' and render the appropriate brand label instead of statically defaulting to Claude. * feat: Add Gemini session persistence API mapping and Sidebar UI * fix(gemini): Watch ~/.gemini/sessions for live UI updates Added the .gemini/sessions directory to PROVIDER_WATCH_PATHS so that Chokidar emits projects_updated websocket events when new Gemini sessions are created or modified, fixing live sidebar updates. * fix(gemini): Fix Gemini authentication status display in Settings UI - Injected 'checkGeminiAuthStatus' into the Settings.jsx React effect hook so that the UI can poll and render the 'geminiAuthStatus' state. - Updated 'checkGeminiCredentials()' inside server/routes/cli-auth.js to read from '~/.gemini/oauth_creds.json' and '~/.gemini/google_accounts.json', resolving the email address correctly. 
* Use logo-only icon for gemini * feat(gemini): Add Gemini 3 preview models to UI selection list * Fix Gemini CLI session resume bug and PR #422 review nitpicks * Fix Gemini tool calls disappearing from UI after completion * fix(gemini): resolve outstanding PR #422 feedback and stabilize gemini CLI timeouts * fix(gemini): resolve resume flag and shell session initialization issues This commit addresses the remaining PR comments for the Gemini CLI integration: - Moves the `--resume` flag logic outside the prompt command block, ensuring Gemini sessions correctly resume even when a new prompt isn't passed. - Updates `handleShellConnection` to correctly lookup the native `cliSessionId` from the internal `sessionId` when spawning Gemini sessions in a plain shell. - Refactors dynamic import of `sessionManager.js` back to a native static import for code consistency. * chore: fix TypeScript errors and remove gemini CLI dependency * fix: use cross-spawn on Windows to resolve gemini.cmd correctly --------- Co-authored-by: Haileyesus <118998054+blackmammoth@users.noreply.github.com>
This commit is contained in:
455
server/gemini-cli.js
Normal file
455
server/gemini-cli.js
Normal file
@@ -0,0 +1,455 @@
|
||||
import { spawn } from 'child_process';
|
||||
import crossSpawn from 'cross-spawn';
|
||||
|
||||
// Use cross-spawn on Windows for correct .cmd resolution (same pattern as cursor-cli.js)
|
||||
const spawnFunction = process.platform === 'win32' ? crossSpawn : spawn;
|
||||
import { promises as fs } from 'fs';
|
||||
import path from 'path';
|
||||
import os from 'os';
|
||||
import { getSessions, getSessionMessages } from './projects.js';
|
||||
import sessionManager from './sessionManager.js';
|
||||
import GeminiResponseHandler from './gemini-response-handler.js';
|
||||
|
||||
// Registry of running Gemini CLI child processes. Keyed by the chat session ID
// when one is known, otherwise by a provisional timestamp key that is swapped
// for the captured session ID once the first stdout chunk arrives.
let activeGeminiProcesses = new Map(); // Track active processes by session ID
||||
|
||||
/**
 * Spawn the Gemini CLI in headless mode and stream its NDJSON output back to
 * the client through the websocket-like `ws` object as `gemini-response`,
 * `gemini-tool-use`, `gemini-tool-result` and `gemini-error` events (plus a
 * few `claude-*` events for compatibility with the existing UI).
 *
 * @param {string} command - User prompt; passed to the CLI via `--prompt` when non-empty.
 * @param {object} [options]
 * @param {string} [options.sessionId] - Internal chat session ID; when the session
 *   manager has a stored `cliSessionId` for it, the CLI is started with `--resume`.
 * @param {string} [options.projectPath] - Fallback working directory.
 * @param {string} [options.cwd] - Preferred working directory for the CLI.
 * @param {object} [options.toolsSettings] - `{ allowedTools, disallowedTools, skipPermissions }`.
 *   NOTE(review): `disallowedTools` is accepted but never mapped to a CLI flag here — confirm intended.
 * @param {string} [options.permissionMode] - 'yolo' | 'auto_edit' | 'plan'.
 * @param {Array}  [options.images] - Items with `data` as a `data:<mime>;base64,...` URL;
 *   each is written to a temp file under the project so the CLI can read it.
 * @param {string} [options.model] - Model name; defaults to 'gemini-2.5-flash'.
 * @param {boolean} [options.debug] - Adds `--debug` to the CLI invocation.
 * @param {object} ws - Transport exposing `send(obj)` and, optionally,
 *   `getSessionId()` / `setSessionId(id)`.
 * @returns {Promise<void>} Resolves when the CLI exits with code 0; rejects on
 *   non-zero exit, termination/timeout (code null), or spawn error.
 */
async function spawnGemini(command, options = {}, ws) {
  // NOTE(review): `resume` is destructured but never used below — resumption is
  // driven entirely by the stored cliSessionId lookup. Confirm it can be dropped.
  const { sessionId, projectPath, cwd, resume, toolsSettings, permissionMode, images } = options;
  let capturedSessionId = sessionId; // Track session ID throughout the process
  let sessionCreatedSent = false; // Track if we've already sent session-created event
  let assistantBlocks = []; // Accumulate the full response blocks including tools

  // Use tools settings passed from frontend, or defaults
  const settings = toolsSettings || {
    allowedTools: [],
    disallowedTools: [],
    skipPermissions: false
  };

  // Build Gemini CLI command - start with print/resume flags first
  const args = [];

  // Add prompt flag with command if we have a command
  if (command && command.trim()) {
    args.push('--prompt', command);
  }

  // If we have a sessionId, we want to resume. The resume flag is added even
  // when no new prompt is given, so a bare reconnect still resumes natively.
  if (sessionId) {
    const session = sessionManager.getSession(sessionId);
    if (session && session.cliSessionId) {
      args.push('--resume', session.cliSessionId);
    }
  }

  // Use cwd (actual project directory) instead of projectPath (Gemini's metadata directory)
  // Clean the path by removing any non-printable characters
  const cleanPath = (cwd || projectPath || process.cwd()).replace(/[^\x20-\x7E]/g, '').trim();
  const workingDir = cleanPath;

  // Handle images by saving them to temporary files and passing paths to Gemini
  const tempImagePaths = [];
  let tempDir = null;
  if (images && images.length > 0) {
    try {
      // Create temp directory in the project directory so Gemini can access it
      tempDir = path.join(workingDir, '.tmp', 'images', Date.now().toString());
      await fs.mkdir(tempDir, { recursive: true });

      // Save each image to a temp file
      for (const [index, image] of images.entries()) {
        // Extract base64 data and mime type from the data-URL
        const matches = image.data.match(/^data:([^;]+);base64,(.+)$/);
        if (!matches) {
          // Silently skip entries that are not data URLs
          continue;
        }

        const [, mimeType, base64Data] = matches;
        const extension = mimeType.split('/')[1] || 'png';
        const filename = `image_${index}.${extension}`;
        const filepath = path.join(tempDir, filename);

        // Write base64 data to file
        await fs.writeFile(filepath, Buffer.from(base64Data, 'base64'));
        tempImagePaths.push(filepath);
      }

      // Include the full image paths in the prompt for Gemini to reference
      // Gemini CLI can read images from file paths in the prompt
      if (tempImagePaths.length > 0 && command && command.trim()) {
        const imageNote = `\n\n[Images given: ${tempImagePaths.length} images are located at the following paths:]\n${tempImagePaths.map((p, i) => `${i + 1}. ${p}`).join('\n')}`;
        const modifiedCommand = command + imageNote;

        // Update the command in args (the value right after the '--prompt' flag)
        const promptIndex = args.indexOf('--prompt');
        if (promptIndex !== -1 && args[promptIndex + 1] === command) {
          args[promptIndex + 1] = modifiedCommand;
        } else if (promptIndex !== -1) {
          // If we're using context, update the full prompt
          args[promptIndex + 1] = args[promptIndex + 1] + imageNote;
        }
      }
    } catch (error) {
      // Best-effort: a failed image write must not block the prompt itself
      console.error('Error processing images for Gemini:', error);
    }
  }

  // Add basic flags for Gemini
  if (options.debug) {
    args.push('--debug');
  }

  // Add MCP config flag only if MCP servers are configured
  try {
    const geminiConfigPath = path.join(os.homedir(), '.gemini.json');
    let hasMcpServers = false;

    try {
      await fs.access(geminiConfigPath);
      const geminiConfigRaw = await fs.readFile(geminiConfigPath, 'utf8');
      const geminiConfig = JSON.parse(geminiConfigRaw);

      // Check global MCP servers
      if (geminiConfig.mcpServers && Object.keys(geminiConfig.mcpServers).length > 0) {
        hasMcpServers = true;
      }

      // Check project-specific MCP servers
      if (!hasMcpServers && geminiConfig.geminiProjects) {
        const currentProjectPath = process.cwd();
        const projectConfig = geminiConfig.geminiProjects[currentProjectPath];
        if (projectConfig && projectConfig.mcpServers && Object.keys(projectConfig.mcpServers).length > 0) {
          hasMcpServers = true;
        }
      }
    } catch (e) {
      // Ignore if file doesn't exist or isn't parsable
    }

    if (hasMcpServers) {
      args.push('--mcp-config', geminiConfigPath);
    }
  } catch (error) {
    // Ignore outer errors — MCP config is strictly optional
  }

  // Add model for all sessions (both new and resumed)
  let modelToUse = options.model || 'gemini-2.5-flash';
  args.push('--model', modelToUse);
  args.push('--output-format', 'stream-json');

  // Handle approval modes and allowed tools (yolo wins over approval-mode)
  if (settings.skipPermissions || options.skipPermissions || permissionMode === 'yolo') {
    args.push('--yolo');
  } else if (permissionMode === 'auto_edit') {
    args.push('--approval-mode', 'auto_edit');
  } else if (permissionMode === 'plan') {
    args.push('--approval-mode', 'plan');
  }

  if (settings.allowedTools && settings.allowedTools.length > 0) {
    args.push('--allowed-tools', settings.allowedTools.join(','));
  }

  // Try to find gemini in PATH first, then fall back to environment variable
  const geminiPath = process.env.GEMINI_PATH || 'gemini';
  console.log('Spawning Gemini CLI:', geminiPath, args.join(' '));
  console.log('Working directory:', workingDir);

  let spawnCmd = geminiPath;
  let spawnArgs = args;

  // On non-Windows platforms, wrap the execution in a shell to avoid ENOEXEC
  // which happens when the target is a script lacking a shebang.
  if (os.platform() !== 'win32') {
    spawnCmd = 'sh';
    // Use exec to replace the shell process, ensuring signals hit gemini directly
    spawnArgs = ['-c', 'exec "$0" "$@"', geminiPath, ...args];
  }

  return new Promise((resolve, reject) => {
    // spawnFunction is cross-spawn on Windows (resolves .cmd), child_process.spawn elsewhere
    const geminiProcess = spawnFunction(spawnCmd, spawnArgs, {
      cwd: workingDir,
      stdio: ['pipe', 'pipe', 'pipe'],
      env: { ...process.env } // Inherit all environment variables
    });

    // Attach temp file info to process for cleanup later (in the 'close' handler)
    geminiProcess.tempImagePaths = tempImagePaths;
    geminiProcess.tempDir = tempDir;

    // Store process reference for potential abort
    const processKey = capturedSessionId || sessionId || Date.now().toString();
    activeGeminiProcesses.set(processKey, geminiProcess);

    // Store sessionId on the process object so abortGeminiSession can find it by scan
    geminiProcess.sessionId = processKey;

    // Close stdin to signal we're done sending input
    geminiProcess.stdin.end();

    // Add timeout handler: re-armed on every stdout chunk, so it only fires
    // after timeoutMs of total silence from the CLI.
    // NOTE(review): hasReceivedOutput is set below but never read — confirm it can be removed.
    let hasReceivedOutput = false;
    const timeoutMs = 120000; // 120 seconds for slower models
    let timeout;

    const startTimeout = () => {
      if (timeout) clearTimeout(timeout);
      timeout = setTimeout(() => {
        const socketSessionId = typeof ws.getSessionId === 'function' ? ws.getSessionId() : (capturedSessionId || sessionId || processKey);
        ws.send({
          type: 'gemini-error',
          sessionId: socketSessionId,
          error: `Gemini CLI timeout - no response received for ${timeoutMs / 1000} seconds`
        });
        try {
          geminiProcess.kill('SIGTERM');
        } catch (e) { }
      }, timeoutMs);
    };

    startTimeout();

    // Save user message to session when starting (resumed sessions only;
    // brand-new sessions save it once the session ID is captured below)
    if (command && capturedSessionId) {
      sessionManager.addMessage(capturedSessionId, 'user', command);
    }

    // Create response handler for NDJSON buffering.
    // NOTE(review): only this handler is guarded by `if (ws)`; every other path
    // (timeout, stderr, close) calls ws.send unconditionally, so a null ws
    // would still crash — confirm whether ws is in fact always provided.
    let responseHandler;
    if (ws) {
      responseHandler = new GeminiResponseHandler(ws, {
        // Coalesce consecutive text fragments into a single text block
        onContentFragment: (content) => {
          if (assistantBlocks.length > 0 && assistantBlocks[assistantBlocks.length - 1].type === 'text') {
            assistantBlocks[assistantBlocks.length - 1].text += content;
          } else {
            assistantBlocks.push({ type: 'text', text: content });
          }
        },
        // Record a tool invocation in the accumulated assistant turn
        onToolUse: (event) => {
          assistantBlocks.push({
            type: 'tool_use',
            id: event.tool_id,
            name: event.tool_name,
            input: event.parameters
          });
        },
        // Flush the assistant turn and persist the tool result as a user turn
        // (mirrors the Claude transcript format the UI expects)
        onToolResult: (event) => {
          if (capturedSessionId) {
            if (assistantBlocks.length > 0) {
              sessionManager.addMessage(capturedSessionId, 'assistant', [...assistantBlocks]);
              assistantBlocks = [];
            }
            sessionManager.addMessage(capturedSessionId, 'user', [{
              type: 'tool_result',
              tool_use_id: event.tool_id,
              content: event.output === undefined ? null : event.output,
              is_error: event.status === 'error'
            }]);
          }
        },
        // Capture Gemini's native session ID once, enabling --resume next time
        onInit: (event) => {
          if (capturedSessionId) {
            const sess = sessionManager.getSession(capturedSessionId);
            if (sess && !sess.cliSessionId) {
              sess.cliSessionId = event.session_id;
              sessionManager.saveSession(capturedSessionId);
            }
          }
        }
      });
    }

    // Handle stdout
    geminiProcess.stdout.on('data', (data) => {
      const rawOutput = data.toString();
      hasReceivedOutput = true;
      startTimeout(); // Re-arm the timeout

      // For new sessions, create a session ID FIRST
      if (!sessionId && !sessionCreatedSent && !capturedSessionId) {
        capturedSessionId = `gemini_${Date.now()}`;
        sessionCreatedSent = true;

        // Create session in session manager
        sessionManager.createSession(capturedSessionId, cwd || process.cwd());

        // Save the user message now that we have a session ID
        if (command) {
          sessionManager.addMessage(capturedSessionId, 'user', command);
        }

        // Update process key with captured session ID
        if (processKey !== capturedSessionId) {
          activeGeminiProcesses.delete(processKey);
          activeGeminiProcesses.set(capturedSessionId, geminiProcess);
        }

        // Tell the transport which session it now belongs to (if supported)
        ws.setSessionId && typeof ws.setSessionId === 'function' && ws.setSessionId(capturedSessionId);

        ws.send({
          type: 'session-created',
          sessionId: capturedSessionId
        });

        // Emit fake system init so the frontend immediately navigates and saves the session
        ws.send({
          type: 'claude-response',
          sessionId: capturedSessionId,
          data: {
            type: 'system',
            subtype: 'init',
            session_id: capturedSessionId
          }
        });
      }

      if (responseHandler) {
        responseHandler.processData(rawOutput);
      } else if (rawOutput) {
        // Fallback to direct sending for raw CLI mode without WS
        // NOTE(review): this branch still calls ws.send / ws.getSessionId, so it
        // cannot actually run without a ws object — see note above.
        if (assistantBlocks.length > 0 && assistantBlocks[assistantBlocks.length - 1].type === 'text') {
          assistantBlocks[assistantBlocks.length - 1].text += rawOutput;
        } else {
          assistantBlocks.push({ type: 'text', text: rawOutput });
        }
        const socketSessionId = typeof ws.getSessionId === 'function' ? ws.getSessionId() : (capturedSessionId || sessionId);
        ws.send({
          type: 'gemini-response',
          sessionId: socketSessionId,
          data: {
            type: 'message',
            content: rawOutput
          }
        });
      }
    });

    // Handle stderr
    geminiProcess.stderr.on('data', (data) => {
      const errorMsg = data.toString();

      // Filter out deprecation warnings and "Loaded cached credentials" message
      if (errorMsg.includes('[DEP0040]') ||
        errorMsg.includes('DeprecationWarning') ||
        errorMsg.includes('--trace-deprecation') ||
        errorMsg.includes('Loaded cached credentials')) {
        return;
      }

      const socketSessionId = typeof ws.getSessionId === 'function' ? ws.getSessionId() : (capturedSessionId || sessionId);
      ws.send({
        type: 'gemini-error',
        sessionId: socketSessionId,
        error: errorMsg
      });
    });

    // Handle process completion
    geminiProcess.on('close', async (code) => {
      clearTimeout(timeout);

      // Flush any remaining buffered content
      if (responseHandler) {
        responseHandler.forceFlush();
        responseHandler.destroy();
      }

      // Clean up process reference
      const finalSessionId = capturedSessionId || sessionId || processKey;
      activeGeminiProcesses.delete(finalSessionId);

      // Save assistant response to session if we have one
      if (finalSessionId && assistantBlocks.length > 0) {
        sessionManager.addMessage(finalSessionId, 'assistant', assistantBlocks);
      }

      ws.send({
        type: 'claude-complete', // Use claude-complete for compatibility with UI
        sessionId: finalSessionId,
        exitCode: code,
        isNewSession: !sessionId && !!command // Flag to indicate this was a new session
      });

      // Clean up temporary image files if any (best-effort; failures ignored)
      if (geminiProcess.tempImagePaths && geminiProcess.tempImagePaths.length > 0) {
        for (const imagePath of geminiProcess.tempImagePaths) {
          await fs.unlink(imagePath).catch(err => { });
        }
        if (geminiProcess.tempDir) {
          await fs.rm(geminiProcess.tempDir, { recursive: true, force: true }).catch(err => { });
        }
      }

      if (code === 0) {
        resolve();
      } else {
        // code === null means the process was killed (abort or timeout above)
        reject(new Error(code === null ? 'Gemini CLI process was terminated or timed out' : `Gemini CLI exited with code ${code}`));
      }
    });

    // Handle process errors (e.g. binary not found)
    geminiProcess.on('error', (error) => {
      // Clean up process reference on error
      const finalSessionId = capturedSessionId || sessionId || processKey;
      activeGeminiProcesses.delete(finalSessionId);

      const errorSessionId = typeof ws.getSessionId === 'function' ? ws.getSessionId() : finalSessionId;
      ws.send({
        type: 'gemini-error',
        sessionId: errorSessionId,
        error: error.message
      });

      reject(error);
    });

  });
}
|
||||
|
||||
/**
 * Request termination of the Gemini CLI process tracked for a session.
 *
 * Sends SIGTERM, then escalates to SIGKILL after 2 seconds if the process
 * entry is still registered (i.e. the 'close' handler has not fired).
 *
 * @param {string} sessionId - Chat session identifier to abort.
 * @returns {boolean} True if a process was found and SIGTERM was delivered.
 */
function abortGeminiSession(sessionId) {
  // Fast path: the session ID is the registry key.
  let target = activeGeminiProcesses.get(sessionId);
  let trackedKey = sessionId;

  // Slow path: the registry may still hold the process under a provisional
  // key while the process object itself carries the session ID tag.
  if (!target) {
    for (const [key, proc] of activeGeminiProcesses.entries()) {
      if (proc.sessionId !== sessionId) continue;
      target = proc;
      trackedKey = key;
      break;
    }
  }

  if (!target) {
    return false;
  }

  try {
    target.kill('SIGTERM');
    setTimeout(() => {
      // Still registered after the grace period: the process ignored SIGTERM.
      if (!activeGeminiProcesses.has(trackedKey)) return;
      try {
        target.kill('SIGKILL');
      } catch (e) { }
    }, 2000); // Wait 2 seconds before force kill

    return true;
  } catch (error) {
    // kill() itself failed (e.g. process already gone in a weird state).
    return false;
  }
}
|
||||
|
||||
/**
 * Report whether a Gemini CLI process is currently tracked for a session.
 * @param {string} sessionId - Chat session identifier used as the registry key.
 * @returns {boolean} True while a spawned process is registered under that key.
 */
function isGeminiSessionActive(sessionId) {
  const tracked = activeGeminiProcesses.has(sessionId);
  return tracked;
}
|
||||
|
||||
/**
 * Snapshot of every session key with a live Gemini CLI process.
 * @returns {string[]} Registry keys in insertion order.
 */
function getActiveGeminiSessions() {
  return [...activeGeminiProcesses.keys()];
}
|
||||
|
||||
// Public API of the Gemini CLI bridge (consumed by server/index.js).
export {
  spawnGemini,
  abortGeminiSession,
  isGeminiSessionActive,
  getActiveGeminiSessions
};
|
||||
140
server/gemini-response-handler.js
Normal file
140
server/gemini-response-handler.js
Normal file
@@ -0,0 +1,140 @@
|
||||
// Parses Gemini CLI `--output-format stream-json` output (newline-delimited
// JSON) and relays each event to the client websocket, invoking optional
// callbacks so the caller can accumulate the assistant turn.
class GeminiResponseHandler {
  /**
   * @param {object} ws - Transport with `send(obj)` and optional `getSessionId()`.
   * @param {object} [options] - Optional event callbacks.
   * @param {Function} [options.onContentFragment] - (text) => void
   * @param {Function} [options.onInit] - (event) => void
   * @param {Function} [options.onToolUse] - (event) => void
   * @param {Function} [options.onToolResult] - (event) => void
   */
  constructor(ws, options = {}) {
    this.ws = ws;
    this.buffer = ''; // carries a partial NDJSON line between stdout chunks
    this.onContentFragment = options.onContentFragment || null;
    this.onInit = options.onInit || null;
    this.onToolUse = options.onToolUse || null;
    this.onToolResult = options.onToolResult || null;
  }

  // Accumulate a raw stdout chunk and dispatch every complete NDJSON line.
  processData(data) {
    this.buffer += data;

    const pieces = this.buffer.split('\n');
    // The final piece may be an unterminated line; keep it for the next chunk.
    this.buffer = pieces.pop() || '';

    for (const piece of pieces) {
      if (!piece.trim()) continue;
      try {
        this.handleEvent(JSON.parse(piece));
      } catch (err) {
        // Non-JSON noise (debug output, CLI warnings) is dropped silently.
      }
    }
  }

  // Translate one parsed stream event into websocket payload(s).
  handleEvent(event) {
    const socketSessionId = typeof this.ws.getSessionId === 'function' ? this.ws.getSessionId() : null;

    // Tag the payload with the socket's session ID (when known) and send it.
    const deliver = (payload) => {
      if (socketSessionId) payload.sessionId = socketSessionId;
      this.ws.send(payload);
    };

    switch (event.type) {
      case 'init':
        // Carries Gemini's native session ID; callback only, nothing to emit.
        if (this.onInit) {
          this.onInit(event);
        }
        return;

      case 'message': {
        // Only assistant-authored messages are streamed to the client.
        if (event.role !== 'assistant') return;
        const content = event.content || '';

        // Notify the parent CLI handler of accumulated text
        if (this.onContentFragment && content) {
          this.onContentFragment(content);
        }

        deliver({
          type: 'gemini-response',
          data: {
            type: 'message',
            content: content,
            isPartial: event.delta === true
          }
        });
        return;
      }

      case 'tool_use':
        if (this.onToolUse) {
          this.onToolUse(event);
        }
        deliver({
          type: 'gemini-tool-use',
          toolName: event.tool_name,
          toolId: event.tool_id,
          parameters: event.parameters || {}
        });
        return;

      case 'tool_result':
        if (this.onToolResult) {
          this.onToolResult(event);
        }
        deliver({
          type: 'gemini-tool-result',
          toolId: event.tool_id,
          status: event.status,
          output: event.output || ''
        });
        return;

      case 'result':
        // Finalize the streamed message with an empty, non-partial fragment.
        deliver({
          type: 'gemini-response',
          data: {
            type: 'message',
            content: '',
            isPartial: false
          }
        });

        // Surface token usage through the UI's existing claude-status channel.
        if (event.stats && event.stats.total_tokens) {
          deliver({
            type: 'claude-status',
            data: {
              status: 'Complete',
              tokens: event.stats.total_tokens
            }
          });
        }
        return;

      case 'error':
        deliver({
          type: 'gemini-error',
          error: event.error || event.message || 'Unknown Gemini streaming error'
        });
        return;
    }
  }

  // Attempt to parse whatever remains in the buffer (stream ended mid-line).
  forceFlush() {
    if (!this.buffer.trim()) return;
    try {
      this.handleEvent(JSON.parse(this.buffer));
    } catch (err) { }
  }

  // Release buffered data.
  destroy() {
    this.buffer = '';
  }
}
|
||||
|
||||
export default GeminiResponseHandler;
|
||||
516
server/index.js
516
server/index.js
@@ -48,6 +48,8 @@ import { getProjects, getSessions, getSessionMessages, renameProject, deleteSess
|
||||
import { queryClaudeSDK, abortClaudeSDKSession, isClaudeSDKSessionActive, getActiveClaudeSDKSessions, resolveToolApproval } from './claude-sdk.js';
|
||||
import { spawnCursor, abortCursorSession, isCursorSessionActive, getActiveCursorSessions } from './cursor-cli.js';
|
||||
import { queryCodex, abortCodexSession, isCodexSessionActive, getActiveCodexSessions } from './openai-codex.js';
|
||||
import { spawnGemini, abortGeminiSession, isGeminiSessionActive, getActiveGeminiSessions } from './gemini-cli.js';
|
||||
import sessionManager from './sessionManager.js';
|
||||
import gitRoutes from './routes/git.js';
|
||||
import authRoutes from './routes/auth.js';
|
||||
import mcpRoutes from './routes/mcp.js';
|
||||
@@ -61,6 +63,7 @@ import projectsRoutes, { WORKSPACES_ROOT, validateWorkspacePath } from './routes
|
||||
import cliAuthRoutes from './routes/cli-auth.js';
|
||||
import userRoutes from './routes/user.js';
|
||||
import codexRoutes from './routes/codex.js';
|
||||
import geminiRoutes from './routes/gemini.js';
|
||||
import { initializeDatabase } from './database/db.js';
|
||||
import { validateApiKey, authenticateToken, authenticateWebSocket } from './middleware/auth.js';
|
||||
import { IS_PLATFORM } from './constants/config.js';
|
||||
@@ -69,7 +72,9 @@ import { IS_PLATFORM } from './constants/config.js';
|
||||
const PROVIDER_WATCH_PATHS = [
|
||||
{ provider: 'claude', rootPath: path.join(os.homedir(), '.claude', 'projects') },
|
||||
{ provider: 'cursor', rootPath: path.join(os.homedir(), '.cursor', 'chats') },
|
||||
{ provider: 'codex', rootPath: path.join(os.homedir(), '.codex', 'sessions') }
|
||||
{ provider: 'codex', rootPath: path.join(os.homedir(), '.codex', 'sessions') },
|
||||
{ provider: 'gemini', rootPath: path.join(os.homedir(), '.gemini', 'projects') },
|
||||
{ provider: 'gemini_sessions', rootPath: path.join(os.homedir(), '.gemini', 'sessions') }
|
||||
];
|
||||
const WATCHER_IGNORED_PATTERNS = [
|
||||
'**/node_modules/**',
|
||||
@@ -319,25 +324,25 @@ app.locals.wss = wss;
|
||||
|
||||
app.use(cors());
|
||||
app.use(express.json({
|
||||
limit: '50mb',
|
||||
type: (req) => {
|
||||
// Skip multipart/form-data requests (for file uploads like images)
|
||||
const contentType = req.headers['content-type'] || '';
|
||||
if (contentType.includes('multipart/form-data')) {
|
||||
return false;
|
||||
limit: '50mb',
|
||||
type: (req) => {
|
||||
// Skip multipart/form-data requests (for file uploads like images)
|
||||
const contentType = req.headers['content-type'] || '';
|
||||
if (contentType.includes('multipart/form-data')) {
|
||||
return false;
|
||||
}
|
||||
return contentType.includes('json');
|
||||
}
|
||||
return contentType.includes('json');
|
||||
}
|
||||
}));
|
||||
app.use(express.urlencoded({ limit: '50mb', extended: true }));
|
||||
|
||||
// Public health check endpoint (no authentication required)
|
||||
app.get('/health', (req, res) => {
|
||||
res.json({
|
||||
status: 'ok',
|
||||
timestamp: new Date().toISOString(),
|
||||
installMode
|
||||
});
|
||||
res.json({
|
||||
status: 'ok',
|
||||
timestamp: new Date().toISOString(),
|
||||
installMode
|
||||
});
|
||||
});
|
||||
|
||||
// Optional API key validation (if configured)
|
||||
@@ -379,6 +384,9 @@ app.use('/api/user', authenticateToken, userRoutes);
|
||||
// Codex API Routes (protected)
|
||||
app.use('/api/codex', authenticateToken, codexRoutes);
|
||||
|
||||
// Gemini API Routes (protected)
|
||||
app.use('/api/gemini', authenticateToken, geminiRoutes);
|
||||
|
||||
// Agent API Routes (uses API key authentication)
|
||||
app.use('/api/agent', agentRoutes);
|
||||
|
||||
@@ -388,17 +396,17 @@ app.use(express.static(path.join(__dirname, '../public')));
|
||||
// Static files served after API routes
|
||||
// Add cache control: HTML files should not be cached, but assets can be cached
|
||||
app.use(express.static(path.join(__dirname, '../dist'), {
|
||||
setHeaders: (res, filePath) => {
|
||||
if (filePath.endsWith('.html')) {
|
||||
// Prevent HTML caching to avoid service worker issues after builds
|
||||
res.setHeader('Cache-Control', 'no-cache, no-store, must-revalidate');
|
||||
res.setHeader('Pragma', 'no-cache');
|
||||
res.setHeader('Expires', '0');
|
||||
} else if (filePath.match(/\.(js|css|woff2?|ttf|eot|svg|png|jpg|jpeg|gif|ico)$/)) {
|
||||
// Cache static assets for 1 year (they have hashed names)
|
||||
res.setHeader('Cache-Control', 'public, max-age=31536000, immutable');
|
||||
setHeaders: (res, filePath) => {
|
||||
if (filePath.endsWith('.html')) {
|
||||
// Prevent HTML caching to avoid service worker issues after builds
|
||||
res.setHeader('Cache-Control', 'no-cache, no-store, must-revalidate');
|
||||
res.setHeader('Pragma', 'no-cache');
|
||||
res.setHeader('Expires', '0');
|
||||
} else if (filePath.match(/\.(js|css|woff2?|ttf|eot|svg|png|jpg|jpeg|gif|ico)$/)) {
|
||||
// Cache static assets for 1 year (they have hashed names)
|
||||
res.setHeader('Cache-Control', 'public, max-age=31536000, immutable');
|
||||
}
|
||||
}
|
||||
}
|
||||
}));
|
||||
|
||||
// API Routes (protected)
|
||||
@@ -496,13 +504,13 @@ app.get('/api/projects/:projectName/sessions/:sessionId/messages', authenticateT
|
||||
try {
|
||||
const { projectName, sessionId } = req.params;
|
||||
const { limit, offset } = req.query;
|
||||
|
||||
|
||||
// Parse limit and offset if provided
|
||||
const parsedLimit = limit ? parseInt(limit, 10) : null;
|
||||
const parsedOffset = offset ? parseInt(offset, 10) : 0;
|
||||
|
||||
|
||||
const result = await getSessionMessages(projectName, sessionId, parsedLimit, parsedOffset);
|
||||
|
||||
|
||||
// Handle both old and new response formats
|
||||
if (Array.isArray(result)) {
|
||||
// Backward compatibility: no pagination parameters were provided
|
||||
@@ -585,13 +593,13 @@ const expandWorkspacePath = (inputPath) => {
|
||||
app.get('/api/browse-filesystem', authenticateToken, async (req, res) => {
|
||||
try {
|
||||
const { path: dirPath } = req.query;
|
||||
|
||||
|
||||
console.log('[API] Browse filesystem request for path:', dirPath);
|
||||
console.log('[API] WORKSPACES_ROOT is:', WORKSPACES_ROOT);
|
||||
// Default to home directory if no path provided
|
||||
const defaultRoot = WORKSPACES_ROOT;
|
||||
let targetPath = dirPath ? expandWorkspacePath(dirPath) : defaultRoot;
|
||||
|
||||
|
||||
// Resolve and normalize the path
|
||||
targetPath = path.resolve(targetPath);
|
||||
|
||||
@@ -601,22 +609,22 @@ app.get('/api/browse-filesystem', authenticateToken, async (req, res) => {
|
||||
return res.status(403).json({ error: validation.error });
|
||||
}
|
||||
const resolvedPath = validation.resolvedPath || targetPath;
|
||||
|
||||
|
||||
// Security check - ensure path is accessible
|
||||
try {
|
||||
await fs.promises.access(resolvedPath);
|
||||
const stats = await fs.promises.stat(resolvedPath);
|
||||
|
||||
|
||||
if (!stats.isDirectory()) {
|
||||
return res.status(400).json({ error: 'Path is not a directory' });
|
||||
}
|
||||
} catch (err) {
|
||||
return res.status(404).json({ error: 'Directory not accessible' });
|
||||
}
|
||||
|
||||
|
||||
// Use existing getFileTree function with shallow depth (only direct children)
|
||||
const fileTree = await getFileTree(resolvedPath, 1, 0, false); // maxDepth=1, showHidden=false
|
||||
|
||||
|
||||
// Filter only directories and format for suggestions
|
||||
const directories = fileTree
|
||||
.filter(item => item.type === 'directory')
|
||||
@@ -632,7 +640,7 @@ app.get('/api/browse-filesystem', authenticateToken, async (req, res) => {
|
||||
if (!aHidden && bHidden) return -1;
|
||||
return a.name.localeCompare(b.name);
|
||||
});
|
||||
|
||||
|
||||
// Add common directories if browsing home directory
|
||||
const suggestions = [];
|
||||
let resolvedWorkspaceRoot = defaultRoot;
|
||||
@@ -645,17 +653,17 @@ app.get('/api/browse-filesystem', authenticateToken, async (req, res) => {
|
||||
const commonDirs = ['Desktop', 'Documents', 'Projects', 'Development', 'Dev', 'Code', 'workspace'];
|
||||
const existingCommon = directories.filter(dir => commonDirs.includes(dir.name));
|
||||
const otherDirs = directories.filter(dir => !commonDirs.includes(dir.name));
|
||||
|
||||
|
||||
suggestions.push(...existingCommon, ...otherDirs);
|
||||
} else {
|
||||
suggestions.push(...directories);
|
||||
}
|
||||
|
||||
|
||||
res.json({
|
||||
path: resolvedPath,
|
||||
suggestions: suggestions
|
||||
});
|
||||
|
||||
|
||||
} catch (error) {
|
||||
console.error('Error browsing filesystem:', error);
|
||||
res.status(500).json({ error: 'Failed to browse filesystem' });
|
||||
@@ -899,26 +907,26 @@ wss.on('connection', (ws, request) => {
|
||||
* WebSocket Writer - Wrapper for WebSocket to match SSEStreamWriter interface
|
||||
*/
|
||||
class WebSocketWriter {
|
||||
constructor(ws) {
|
||||
this.ws = ws;
|
||||
this.sessionId = null;
|
||||
this.isWebSocketWriter = true; // Marker for transport detection
|
||||
}
|
||||
|
||||
send(data) {
|
||||
if (this.ws.readyState === 1) { // WebSocket.OPEN
|
||||
// Providers send raw objects, we stringify for WebSocket
|
||||
this.ws.send(JSON.stringify(data));
|
||||
constructor(ws) {
|
||||
this.ws = ws;
|
||||
this.sessionId = null;
|
||||
this.isWebSocketWriter = true; // Marker for transport detection
|
||||
}
|
||||
}
|
||||
|
||||
setSessionId(sessionId) {
|
||||
this.sessionId = sessionId;
|
||||
}
|
||||
send(data) {
|
||||
if (this.ws.readyState === 1) { // WebSocket.OPEN
|
||||
// Providers send raw objects, we stringify for WebSocket
|
||||
this.ws.send(JSON.stringify(data));
|
||||
}
|
||||
}
|
||||
|
||||
getSessionId() {
|
||||
return this.sessionId;
|
||||
}
|
||||
setSessionId(sessionId) {
|
||||
this.sessionId = sessionId;
|
||||
}
|
||||
|
||||
getSessionId() {
|
||||
return this.sessionId;
|
||||
}
|
||||
}
|
||||
|
||||
// Handle chat WebSocket connections
|
||||
@@ -954,6 +962,12 @@ function handleChatConnection(ws) {
|
||||
console.log('🔄 Session:', data.options?.sessionId ? 'Resume' : 'New');
|
||||
console.log('🤖 Model:', data.options?.model || 'default');
|
||||
await queryCodex(data.command, data.options, writer);
|
||||
} else if (data.type === 'gemini-command') {
|
||||
console.log('[DEBUG] Gemini message:', data.command || '[Continue/Resume]');
|
||||
console.log('📁 Project:', data.options?.projectPath || data.options?.cwd || 'Unknown');
|
||||
console.log('🔄 Session:', data.options?.sessionId ? 'Resume' : 'New');
|
||||
console.log('🤖 Model:', data.options?.model || 'default');
|
||||
await spawnGemini(data.command, data.options, writer);
|
||||
} else if (data.type === 'cursor-resume') {
|
||||
// Backward compatibility: treat as cursor-command with resume and no prompt
|
||||
console.log('[DEBUG] Cursor resume session (compat):', data.sessionId);
|
||||
@@ -971,6 +985,8 @@ function handleChatConnection(ws) {
|
||||
success = abortCursorSession(data.sessionId);
|
||||
} else if (provider === 'codex') {
|
||||
success = abortCodexSession(data.sessionId);
|
||||
} else if (provider === 'gemini') {
|
||||
success = abortGeminiSession(data.sessionId);
|
||||
} else {
|
||||
// Use Claude Agents SDK
|
||||
success = await abortClaudeSDKSession(data.sessionId);
|
||||
@@ -1013,6 +1029,8 @@ function handleChatConnection(ws) {
|
||||
isActive = isCursorSessionActive(sessionId);
|
||||
} else if (provider === 'codex') {
|
||||
isActive = isCodexSessionActive(sessionId);
|
||||
} else if (provider === 'gemini') {
|
||||
isActive = isGeminiSessionActive(sessionId);
|
||||
} else {
|
||||
// Use Claude Agents SDK
|
||||
isActive = isClaudeSDKSessionActive(sessionId);
|
||||
@@ -1029,7 +1047,8 @@ function handleChatConnection(ws) {
|
||||
const activeSessions = {
|
||||
claude: getActiveClaudeSDKSessions(),
|
||||
cursor: getActiveCursorSessions(),
|
||||
codex: getActiveCodexSessions()
|
||||
codex: getActiveCodexSessions(),
|
||||
gemini: getActiveGeminiSessions()
|
||||
};
|
||||
writer.send({
|
||||
type: 'active-sessions',
|
||||
@@ -1138,7 +1157,7 @@ function handleShellConnection(ws) {
|
||||
if (isPlainShell) {
|
||||
welcomeMsg = `\x1b[36mStarting terminal in: ${projectPath}\x1b[0m\r\n`;
|
||||
} else {
|
||||
const providerName = provider === 'cursor' ? 'Cursor' : provider === 'codex' ? 'Codex' : 'Claude';
|
||||
const providerName = provider === 'cursor' ? 'Cursor' : (provider === 'codex' ? 'Codex' : (provider === 'gemini' ? 'Gemini' : 'Claude'));
|
||||
welcomeMsg = hasSession ?
|
||||
`\x1b[36mResuming ${providerName} session ${sessionId} in: ${projectPath}\x1b[0m\r\n` :
|
||||
`\x1b[36mStarting new ${providerName} session in: ${projectPath}\x1b[0m\r\n`;
|
||||
@@ -1174,6 +1193,7 @@ function handleShellConnection(ws) {
|
||||
shellCommand = `cd "${projectPath}" && cursor-agent`;
|
||||
}
|
||||
}
|
||||
|
||||
} else if (provider === 'codex') {
|
||||
// Use codex command
|
||||
if (os.platform() === 'win32') {
|
||||
@@ -1191,6 +1211,37 @@ function handleShellConnection(ws) {
|
||||
shellCommand = `cd "${projectPath}" && codex`;
|
||||
}
|
||||
}
|
||||
} else if (provider === 'gemini') {
|
||||
// Use gemini command
|
||||
const command = initialCommand || 'gemini';
|
||||
let resumeId = sessionId;
|
||||
if (hasSession && sessionId) {
|
||||
try {
|
||||
// Gemini CLI enforces its own native session IDs, unlike other agents that accept arbitrary string names.
|
||||
// The UI only knows about its internal generated `sessionId` (e.g. gemini_1234).
|
||||
// We must fetch the mapping from the backend session manager to pass the native `cliSessionId` to the shell.
|
||||
const sess = sessionManager.getSession(sessionId);
|
||||
if (sess && sess.cliSessionId) {
|
||||
resumeId = sess.cliSessionId;
|
||||
}
|
||||
} catch (err) {
|
||||
console.error('Failed to get Gemini CLI session ID:', err);
|
||||
}
|
||||
}
|
||||
|
||||
if (os.platform() === 'win32') {
|
||||
if (hasSession && resumeId) {
|
||||
shellCommand = `Set-Location -Path "${projectPath}"; ${command} --resume "${resumeId}"`;
|
||||
} else {
|
||||
shellCommand = `Set-Location -Path "${projectPath}"; ${command}`;
|
||||
}
|
||||
} else {
|
||||
if (hasSession && resumeId) {
|
||||
shellCommand = `cd "${projectPath}" && ${command} --resume "${resumeId}"`;
|
||||
} else {
|
||||
shellCommand = `cd "${projectPath}" && ${command}`;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Use claude command (default) or initialCommand if provided
|
||||
const command = initialCommand || 'claude';
|
||||
@@ -1624,203 +1675,214 @@ app.post('/api/projects/:projectName/upload-images', authenticateToken, async (r
|
||||
|
||||
// Get token usage for a specific session
|
||||
app.get('/api/projects/:projectName/sessions/:sessionId/token-usage', authenticateToken, async (req, res) => {
|
||||
try {
|
||||
const { projectName, sessionId } = req.params;
|
||||
const { provider = 'claude' } = req.query;
|
||||
const homeDir = os.homedir();
|
||||
try {
|
||||
const { projectName, sessionId } = req.params;
|
||||
const { provider = 'claude' } = req.query;
|
||||
const homeDir = os.homedir();
|
||||
|
||||
// Allow only safe characters in sessionId
|
||||
const safeSessionId = String(sessionId).replace(/[^a-zA-Z0-9._-]/g, '');
|
||||
if (!safeSessionId) {
|
||||
return res.status(400).json({ error: 'Invalid sessionId' });
|
||||
}
|
||||
// Allow only safe characters in sessionId
|
||||
const safeSessionId = String(sessionId).replace(/[^a-zA-Z0-9._-]/g, '');
|
||||
if (!safeSessionId) {
|
||||
return res.status(400).json({ error: 'Invalid sessionId' });
|
||||
}
|
||||
|
||||
// Handle Cursor sessions - they use SQLite and don't have token usage info
|
||||
if (provider === 'cursor') {
|
||||
return res.json({
|
||||
used: 0,
|
||||
total: 0,
|
||||
breakdown: { input: 0, cacheCreation: 0, cacheRead: 0 },
|
||||
unsupported: true,
|
||||
message: 'Token usage tracking not available for Cursor sessions'
|
||||
});
|
||||
}
|
||||
// Handle Cursor sessions - they use SQLite and don't have token usage info
|
||||
if (provider === 'cursor') {
|
||||
return res.json({
|
||||
used: 0,
|
||||
total: 0,
|
||||
breakdown: { input: 0, cacheCreation: 0, cacheRead: 0 },
|
||||
unsupported: true,
|
||||
message: 'Token usage tracking not available for Cursor sessions'
|
||||
});
|
||||
}
|
||||
|
||||
// Handle Codex sessions
|
||||
if (provider === 'codex') {
|
||||
const codexSessionsDir = path.join(homeDir, '.codex', 'sessions');
|
||||
// Handle Gemini sessions - they are raw logs in our current setup
|
||||
if (provider === 'gemini') {
|
||||
return res.json({
|
||||
used: 0,
|
||||
total: 0,
|
||||
breakdown: { input: 0, cacheCreation: 0, cacheRead: 0 },
|
||||
unsupported: true,
|
||||
message: 'Token usage tracking not available for Gemini sessions'
|
||||
});
|
||||
}
|
||||
|
||||
// Find the session file by searching for the session ID
|
||||
const findSessionFile = async (dir) => {
|
||||
try {
|
||||
const entries = await fsPromises.readdir(dir, { withFileTypes: true });
|
||||
for (const entry of entries) {
|
||||
const fullPath = path.join(dir, entry.name);
|
||||
if (entry.isDirectory()) {
|
||||
const found = await findSessionFile(fullPath);
|
||||
if (found) return found;
|
||||
} else if (entry.name.includes(safeSessionId) && entry.name.endsWith('.jsonl')) {
|
||||
return fullPath;
|
||||
// Handle Codex sessions
|
||||
if (provider === 'codex') {
|
||||
const codexSessionsDir = path.join(homeDir, '.codex', 'sessions');
|
||||
|
||||
// Find the session file by searching for the session ID
|
||||
const findSessionFile = async (dir) => {
|
||||
try {
|
||||
const entries = await fsPromises.readdir(dir, { withFileTypes: true });
|
||||
for (const entry of entries) {
|
||||
const fullPath = path.join(dir, entry.name);
|
||||
if (entry.isDirectory()) {
|
||||
const found = await findSessionFile(fullPath);
|
||||
if (found) return found;
|
||||
} else if (entry.name.includes(safeSessionId) && entry.name.endsWith('.jsonl')) {
|
||||
return fullPath;
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
// Skip directories we can't read
|
||||
}
|
||||
return null;
|
||||
};
|
||||
|
||||
const sessionFilePath = await findSessionFile(codexSessionsDir);
|
||||
|
||||
if (!sessionFilePath) {
|
||||
return res.status(404).json({ error: 'Codex session file not found', sessionId: safeSessionId });
|
||||
}
|
||||
}
|
||||
|
||||
// Read and parse the Codex JSONL file
|
||||
let fileContent;
|
||||
try {
|
||||
fileContent = await fsPromises.readFile(sessionFilePath, 'utf8');
|
||||
} catch (error) {
|
||||
if (error.code === 'ENOENT') {
|
||||
return res.status(404).json({ error: 'Session file not found', path: sessionFilePath });
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
const lines = fileContent.trim().split('\n');
|
||||
let totalTokens = 0;
|
||||
let contextWindow = 200000; // Default for Codex/OpenAI
|
||||
|
||||
// Find the latest token_count event with info (scan from end)
|
||||
for (let i = lines.length - 1; i >= 0; i--) {
|
||||
try {
|
||||
const entry = JSON.parse(lines[i]);
|
||||
|
||||
// Codex stores token info in event_msg with type: "token_count"
|
||||
if (entry.type === 'event_msg' && entry.payload?.type === 'token_count' && entry.payload?.info) {
|
||||
const tokenInfo = entry.payload.info;
|
||||
if (tokenInfo.total_token_usage) {
|
||||
totalTokens = tokenInfo.total_token_usage.total_tokens || 0;
|
||||
}
|
||||
if (tokenInfo.model_context_window) {
|
||||
contextWindow = tokenInfo.model_context_window;
|
||||
}
|
||||
break; // Stop after finding the latest token count
|
||||
}
|
||||
} catch (parseError) {
|
||||
// Skip lines that can't be parsed
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
return res.json({
|
||||
used: totalTokens,
|
||||
total: contextWindow
|
||||
});
|
||||
}
|
||||
|
||||
// Handle Claude sessions (default)
|
||||
// Extract actual project path
|
||||
let projectPath;
|
||||
try {
|
||||
projectPath = await extractProjectDirectory(projectName);
|
||||
} catch (error) {
|
||||
// Skip directories we can't read
|
||||
console.error('Error extracting project directory:', error);
|
||||
return res.status(500).json({ error: 'Failed to determine project path' });
|
||||
}
|
||||
return null;
|
||||
};
|
||||
|
||||
const sessionFilePath = await findSessionFile(codexSessionsDir);
|
||||
// Construct the JSONL file path
|
||||
// Claude stores session files in ~/.claude/projects/[encoded-project-path]/[session-id].jsonl
|
||||
// The encoding replaces /, spaces, ~, and _ with -
|
||||
const encodedPath = projectPath.replace(/[\\/:\s~_]/g, '-');
|
||||
const projectDir = path.join(homeDir, '.claude', 'projects', encodedPath);
|
||||
|
||||
if (!sessionFilePath) {
|
||||
return res.status(404).json({ error: 'Codex session file not found', sessionId: safeSessionId });
|
||||
}
|
||||
const jsonlPath = path.join(projectDir, `${safeSessionId}.jsonl`);
|
||||
|
||||
// Read and parse the Codex JSONL file
|
||||
let fileContent;
|
||||
try {
|
||||
fileContent = await fsPromises.readFile(sessionFilePath, 'utf8');
|
||||
} catch (error) {
|
||||
if (error.code === 'ENOENT') {
|
||||
return res.status(404).json({ error: 'Session file not found', path: sessionFilePath });
|
||||
// Constrain to projectDir
|
||||
const rel = path.relative(path.resolve(projectDir), path.resolve(jsonlPath));
|
||||
if (rel.startsWith('..') || path.isAbsolute(rel)) {
|
||||
return res.status(400).json({ error: 'Invalid path' });
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
const lines = fileContent.trim().split('\n');
|
||||
let totalTokens = 0;
|
||||
let contextWindow = 200000; // Default for Codex/OpenAI
|
||||
|
||||
// Find the latest token_count event with info (scan from end)
|
||||
for (let i = lines.length - 1; i >= 0; i--) {
|
||||
// Read and parse the JSONL file
|
||||
let fileContent;
|
||||
try {
|
||||
const entry = JSON.parse(lines[i]);
|
||||
|
||||
// Codex stores token info in event_msg with type: "token_count"
|
||||
if (entry.type === 'event_msg' && entry.payload?.type === 'token_count' && entry.payload?.info) {
|
||||
const tokenInfo = entry.payload.info;
|
||||
if (tokenInfo.total_token_usage) {
|
||||
totalTokens = tokenInfo.total_token_usage.total_tokens || 0;
|
||||
fileContent = await fsPromises.readFile(jsonlPath, 'utf8');
|
||||
} catch (error) {
|
||||
if (error.code === 'ENOENT') {
|
||||
return res.status(404).json({ error: 'Session file not found', path: jsonlPath });
|
||||
}
|
||||
if (tokenInfo.model_context_window) {
|
||||
contextWindow = tokenInfo.model_context_window;
|
||||
throw error; // Re-throw other errors to be caught by outer try-catch
|
||||
}
|
||||
const lines = fileContent.trim().split('\n');
|
||||
|
||||
const parsedContextWindow = parseInt(process.env.CONTEXT_WINDOW, 10);
|
||||
const contextWindow = Number.isFinite(parsedContextWindow) ? parsedContextWindow : 160000;
|
||||
let inputTokens = 0;
|
||||
let cacheCreationTokens = 0;
|
||||
let cacheReadTokens = 0;
|
||||
|
||||
// Find the latest assistant message with usage data (scan from end)
|
||||
for (let i = lines.length - 1; i >= 0; i--) {
|
||||
try {
|
||||
const entry = JSON.parse(lines[i]);
|
||||
|
||||
// Only count assistant messages which have usage data
|
||||
if (entry.type === 'assistant' && entry.message?.usage) {
|
||||
const usage = entry.message.usage;
|
||||
|
||||
// Use token counts from latest assistant message only
|
||||
inputTokens = usage.input_tokens || 0;
|
||||
cacheCreationTokens = usage.cache_creation_input_tokens || 0;
|
||||
cacheReadTokens = usage.cache_read_input_tokens || 0;
|
||||
|
||||
break; // Stop after finding the latest assistant message
|
||||
}
|
||||
} catch (parseError) {
|
||||
// Skip lines that can't be parsed
|
||||
continue;
|
||||
}
|
||||
break; // Stop after finding the latest token count
|
||||
}
|
||||
} catch (parseError) {
|
||||
// Skip lines that can't be parsed
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
return res.json({
|
||||
used: totalTokens,
|
||||
total: contextWindow
|
||||
});
|
||||
}
|
||||
// Calculate total context usage (excluding output_tokens, as per ccusage)
|
||||
const totalUsed = inputTokens + cacheCreationTokens + cacheReadTokens;
|
||||
|
||||
// Handle Claude sessions (default)
|
||||
// Extract actual project path
|
||||
let projectPath;
|
||||
try {
|
||||
projectPath = await extractProjectDirectory(projectName);
|
||||
res.json({
|
||||
used: totalUsed,
|
||||
total: contextWindow,
|
||||
breakdown: {
|
||||
input: inputTokens,
|
||||
cacheCreation: cacheCreationTokens,
|
||||
cacheRead: cacheReadTokens
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('Error extracting project directory:', error);
|
||||
return res.status(500).json({ error: 'Failed to determine project path' });
|
||||
console.error('Error reading session token usage:', error);
|
||||
res.status(500).json({ error: 'Failed to read session token usage' });
|
||||
}
|
||||
|
||||
// Construct the JSONL file path
|
||||
// Claude stores session files in ~/.claude/projects/[encoded-project-path]/[session-id].jsonl
|
||||
// The encoding replaces /, spaces, ~, and _ with -
|
||||
const encodedPath = projectPath.replace(/[\\/:\s~_]/g, '-');
|
||||
const projectDir = path.join(homeDir, '.claude', 'projects', encodedPath);
|
||||
|
||||
const jsonlPath = path.join(projectDir, `${safeSessionId}.jsonl`);
|
||||
|
||||
// Constrain to projectDir
|
||||
const rel = path.relative(path.resolve(projectDir), path.resolve(jsonlPath));
|
||||
if (rel.startsWith('..') || path.isAbsolute(rel)) {
|
||||
return res.status(400).json({ error: 'Invalid path' });
|
||||
}
|
||||
|
||||
// Read and parse the JSONL file
|
||||
let fileContent;
|
||||
try {
|
||||
fileContent = await fsPromises.readFile(jsonlPath, 'utf8');
|
||||
} catch (error) {
|
||||
if (error.code === 'ENOENT') {
|
||||
return res.status(404).json({ error: 'Session file not found', path: jsonlPath });
|
||||
}
|
||||
throw error; // Re-throw other errors to be caught by outer try-catch
|
||||
}
|
||||
const lines = fileContent.trim().split('\n');
|
||||
|
||||
const parsedContextWindow = parseInt(process.env.CONTEXT_WINDOW, 10);
|
||||
const contextWindow = Number.isFinite(parsedContextWindow) ? parsedContextWindow : 160000;
|
||||
let inputTokens = 0;
|
||||
let cacheCreationTokens = 0;
|
||||
let cacheReadTokens = 0;
|
||||
|
||||
// Find the latest assistant message with usage data (scan from end)
|
||||
for (let i = lines.length - 1; i >= 0; i--) {
|
||||
try {
|
||||
const entry = JSON.parse(lines[i]);
|
||||
|
||||
// Only count assistant messages which have usage data
|
||||
if (entry.type === 'assistant' && entry.message?.usage) {
|
||||
const usage = entry.message.usage;
|
||||
|
||||
// Use token counts from latest assistant message only
|
||||
inputTokens = usage.input_tokens || 0;
|
||||
cacheCreationTokens = usage.cache_creation_input_tokens || 0;
|
||||
cacheReadTokens = usage.cache_read_input_tokens || 0;
|
||||
|
||||
break; // Stop after finding the latest assistant message
|
||||
}
|
||||
} catch (parseError) {
|
||||
// Skip lines that can't be parsed
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate total context usage (excluding output_tokens, as per ccusage)
|
||||
const totalUsed = inputTokens + cacheCreationTokens + cacheReadTokens;
|
||||
|
||||
res.json({
|
||||
used: totalUsed,
|
||||
total: contextWindow,
|
||||
breakdown: {
|
||||
input: inputTokens,
|
||||
cacheCreation: cacheCreationTokens,
|
||||
cacheRead: cacheReadTokens
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('Error reading session token usage:', error);
|
||||
res.status(500).json({ error: 'Failed to read session token usage' });
|
||||
}
|
||||
});
|
||||
|
||||
// Serve React app for all other routes (excluding static files)
|
||||
app.get('*', (req, res) => {
|
||||
// Skip requests for static assets (files with extensions)
|
||||
if (path.extname(req.path)) {
|
||||
return res.status(404).send('Not found');
|
||||
}
|
||||
// Skip requests for static assets (files with extensions)
|
||||
if (path.extname(req.path)) {
|
||||
return res.status(404).send('Not found');
|
||||
}
|
||||
|
||||
// Only serve index.html for HTML routes, not for static assets
|
||||
// Static assets should already be handled by express.static middleware above
|
||||
const indexPath = path.join(__dirname, '../dist/index.html');
|
||||
// Only serve index.html for HTML routes, not for static assets
|
||||
// Static assets should already be handled by express.static middleware above
|
||||
const indexPath = path.join(__dirname, '../dist/index.html');
|
||||
|
||||
// Check if dist/index.html exists (production build available)
|
||||
if (fs.existsSync(indexPath)) {
|
||||
// Set no-cache headers for HTML to prevent service worker issues
|
||||
res.setHeader('Cache-Control', 'no-cache, no-store, must-revalidate');
|
||||
res.setHeader('Pragma', 'no-cache');
|
||||
res.setHeader('Expires', '0');
|
||||
res.sendFile(indexPath);
|
||||
} else {
|
||||
// In development, redirect to Vite dev server only if dist doesn't exist
|
||||
res.redirect(`http://localhost:${process.env.VITE_PORT || 5173}`);
|
||||
}
|
||||
// Check if dist/index.html exists (production build available)
|
||||
if (fs.existsSync(indexPath)) {
|
||||
// Set no-cache headers for HTML to prevent service worker issues
|
||||
res.setHeader('Cache-Control', 'no-cache, no-store, must-revalidate');
|
||||
res.setHeader('Pragma', 'no-cache');
|
||||
res.setHeader('Expires', '0');
|
||||
res.sendFile(indexPath);
|
||||
} else {
|
||||
// In development, redirect to Vite dev server only if dist doesn't exist
|
||||
res.redirect(`http://localhost:${process.env.VITE_PORT || 5173}`);
|
||||
}
|
||||
});
|
||||
|
||||
// Helper function to convert permissions to rwx format
|
||||
|
||||
@@ -65,133 +65,134 @@ import crypto from 'crypto';
|
||||
import sqlite3 from 'sqlite3';
|
||||
import { open } from 'sqlite';
|
||||
import os from 'os';
|
||||
import sessionManager from './sessionManager.js';
|
||||
|
||||
// Import TaskMaster detection functions
|
||||
async function detectTaskMasterFolder(projectPath) {
|
||||
try {
|
||||
const taskMasterPath = path.join(projectPath, '.taskmaster');
|
||||
|
||||
// Check if .taskmaster directory exists
|
||||
try {
|
||||
const taskMasterPath = path.join(projectPath, '.taskmaster');
|
||||
|
||||
// Check if .taskmaster directory exists
|
||||
try {
|
||||
const stats = await fs.stat(taskMasterPath);
|
||||
if (!stats.isDirectory()) {
|
||||
return {
|
||||
hasTaskmaster: false,
|
||||
reason: '.taskmaster exists but is not a directory'
|
||||
};
|
||||
}
|
||||
} catch (error) {
|
||||
if (error.code === 'ENOENT') {
|
||||
return {
|
||||
hasTaskmaster: false,
|
||||
reason: '.taskmaster directory not found'
|
||||
};
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
|
||||
// Check for key TaskMaster files
|
||||
const keyFiles = [
|
||||
'tasks/tasks.json',
|
||||
'config.json'
|
||||
];
|
||||
|
||||
const fileStatus = {};
|
||||
let hasEssentialFiles = true;
|
||||
|
||||
for (const file of keyFiles) {
|
||||
const filePath = path.join(taskMasterPath, file);
|
||||
try {
|
||||
await fs.access(filePath);
|
||||
fileStatus[file] = true;
|
||||
} catch (error) {
|
||||
fileStatus[file] = false;
|
||||
if (file === 'tasks/tasks.json') {
|
||||
hasEssentialFiles = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Parse tasks.json if it exists for metadata
|
||||
let taskMetadata = null;
|
||||
if (fileStatus['tasks/tasks.json']) {
|
||||
try {
|
||||
const tasksPath = path.join(taskMasterPath, 'tasks/tasks.json');
|
||||
const tasksContent = await fs.readFile(tasksPath, 'utf8');
|
||||
const tasksData = JSON.parse(tasksContent);
|
||||
|
||||
// Handle both tagged and legacy formats
|
||||
let tasks = [];
|
||||
if (tasksData.tasks) {
|
||||
// Legacy format
|
||||
tasks = tasksData.tasks;
|
||||
} else {
|
||||
// Tagged format - get tasks from all tags
|
||||
Object.values(tasksData).forEach(tagData => {
|
||||
if (tagData.tasks) {
|
||||
tasks = tasks.concat(tagData.tasks);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Calculate task statistics
|
||||
const stats = tasks.reduce((acc, task) => {
|
||||
acc.total++;
|
||||
acc[task.status] = (acc[task.status] || 0) + 1;
|
||||
|
||||
// Count subtasks
|
||||
if (task.subtasks) {
|
||||
task.subtasks.forEach(subtask => {
|
||||
acc.subtotalTasks++;
|
||||
acc.subtasks = acc.subtasks || {};
|
||||
acc.subtasks[subtask.status] = (acc.subtasks[subtask.status] || 0) + 1;
|
||||
});
|
||||
}
|
||||
|
||||
return acc;
|
||||
}, {
|
||||
total: 0,
|
||||
subtotalTasks: 0,
|
||||
pending: 0,
|
||||
'in-progress': 0,
|
||||
done: 0,
|
||||
review: 0,
|
||||
deferred: 0,
|
||||
cancelled: 0,
|
||||
subtasks: {}
|
||||
});
|
||||
|
||||
taskMetadata = {
|
||||
taskCount: stats.total,
|
||||
subtaskCount: stats.subtotalTasks,
|
||||
completed: stats.done || 0,
|
||||
pending: stats.pending || 0,
|
||||
inProgress: stats['in-progress'] || 0,
|
||||
review: stats.review || 0,
|
||||
completionPercentage: stats.total > 0 ? Math.round((stats.done / stats.total) * 100) : 0,
|
||||
lastModified: (await fs.stat(tasksPath)).mtime.toISOString()
|
||||
};
|
||||
} catch (parseError) {
|
||||
console.warn('Failed to parse tasks.json:', parseError.message);
|
||||
taskMetadata = { error: 'Failed to parse tasks.json' };
|
||||
}
|
||||
}
|
||||
|
||||
const stats = await fs.stat(taskMasterPath);
|
||||
if (!stats.isDirectory()) {
|
||||
return {
|
||||
hasTaskmaster: true,
|
||||
hasEssentialFiles,
|
||||
files: fileStatus,
|
||||
metadata: taskMetadata,
|
||||
path: taskMasterPath
|
||||
hasTaskmaster: false,
|
||||
reason: '.taskmaster exists but is not a directory'
|
||||
};
|
||||
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error detecting TaskMaster folder:', error);
|
||||
if (error.code === 'ENOENT') {
|
||||
return {
|
||||
hasTaskmaster: false,
|
||||
reason: `Error checking directory: ${error.message}`
|
||||
hasTaskmaster: false,
|
||||
reason: '.taskmaster directory not found'
|
||||
};
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
|
||||
// Check for key TaskMaster files
|
||||
const keyFiles = [
|
||||
'tasks/tasks.json',
|
||||
'config.json'
|
||||
];
|
||||
|
||||
const fileStatus = {};
|
||||
let hasEssentialFiles = true;
|
||||
|
||||
for (const file of keyFiles) {
|
||||
const filePath = path.join(taskMasterPath, file);
|
||||
try {
|
||||
await fs.access(filePath);
|
||||
fileStatus[file] = true;
|
||||
} catch (error) {
|
||||
fileStatus[file] = false;
|
||||
if (file === 'tasks/tasks.json') {
|
||||
hasEssentialFiles = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Parse tasks.json if it exists for metadata
|
||||
let taskMetadata = null;
|
||||
if (fileStatus['tasks/tasks.json']) {
|
||||
try {
|
||||
const tasksPath = path.join(taskMasterPath, 'tasks/tasks.json');
|
||||
const tasksContent = await fs.readFile(tasksPath, 'utf8');
|
||||
const tasksData = JSON.parse(tasksContent);
|
||||
|
||||
// Handle both tagged and legacy formats
|
||||
let tasks = [];
|
||||
if (tasksData.tasks) {
|
||||
// Legacy format
|
||||
tasks = tasksData.tasks;
|
||||
} else {
|
||||
// Tagged format - get tasks from all tags
|
||||
Object.values(tasksData).forEach(tagData => {
|
||||
if (tagData.tasks) {
|
||||
tasks = tasks.concat(tagData.tasks);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Calculate task statistics
|
||||
const stats = tasks.reduce((acc, task) => {
|
||||
acc.total++;
|
||||
acc[task.status] = (acc[task.status] || 0) + 1;
|
||||
|
||||
// Count subtasks
|
||||
if (task.subtasks) {
|
||||
task.subtasks.forEach(subtask => {
|
||||
acc.subtotalTasks++;
|
||||
acc.subtasks = acc.subtasks || {};
|
||||
acc.subtasks[subtask.status] = (acc.subtasks[subtask.status] || 0) + 1;
|
||||
});
|
||||
}
|
||||
|
||||
return acc;
|
||||
}, {
|
||||
total: 0,
|
||||
subtotalTasks: 0,
|
||||
pending: 0,
|
||||
'in-progress': 0,
|
||||
done: 0,
|
||||
review: 0,
|
||||
deferred: 0,
|
||||
cancelled: 0,
|
||||
subtasks: {}
|
||||
});
|
||||
|
||||
taskMetadata = {
|
||||
taskCount: stats.total,
|
||||
subtaskCount: stats.subtotalTasks,
|
||||
completed: stats.done || 0,
|
||||
pending: stats.pending || 0,
|
||||
inProgress: stats['in-progress'] || 0,
|
||||
review: stats.review || 0,
|
||||
completionPercentage: stats.total > 0 ? Math.round((stats.done / stats.total) * 100) : 0,
|
||||
lastModified: (await fs.stat(tasksPath)).mtime.toISOString()
|
||||
};
|
||||
} catch (parseError) {
|
||||
console.warn('Failed to parse tasks.json:', parseError.message);
|
||||
taskMetadata = { error: 'Failed to parse tasks.json' };
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
hasTaskmaster: true,
|
||||
hasEssentialFiles,
|
||||
files: fileStatus,
|
||||
metadata: taskMetadata,
|
||||
path: taskMasterPath
|
||||
};
|
||||
|
||||
} catch (error) {
|
||||
console.error('Error detecting TaskMaster folder:', error);
|
||||
return {
|
||||
hasTaskmaster: false,
|
||||
reason: `Error checking directory: ${error.message}`
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// Cache for extracted project directories
|
||||
@@ -218,7 +219,7 @@ async function loadProjectConfig() {
|
||||
async function saveProjectConfig(config) {
|
||||
const claudeDir = path.join(os.homedir(), '.claude');
|
||||
const configPath = path.join(claudeDir, 'project-config.json');
|
||||
|
||||
|
||||
// Ensure the .claude directory exists
|
||||
try {
|
||||
await fs.mkdir(claudeDir, { recursive: true });
|
||||
@@ -227,7 +228,7 @@ async function saveProjectConfig(config) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
await fs.writeFile(configPath, JSON.stringify(config, null, 2), 'utf8');
|
||||
}
|
||||
|
||||
@@ -235,13 +236,13 @@ async function saveProjectConfig(config) {
|
||||
async function generateDisplayName(projectName, actualProjectDir = null) {
|
||||
// Use actual project directory if provided, otherwise decode from project name
|
||||
let projectPath = actualProjectDir || projectName.replace(/-/g, '/');
|
||||
|
||||
|
||||
// Try to read package.json from the project path
|
||||
try {
|
||||
const packageJsonPath = path.join(projectPath, 'package.json');
|
||||
const packageData = await fs.readFile(packageJsonPath, 'utf8');
|
||||
const packageJson = JSON.parse(packageData);
|
||||
|
||||
|
||||
// Return the name from package.json if it exists
|
||||
if (packageJson.name) {
|
||||
return packageJson.name;
|
||||
@@ -249,14 +250,14 @@ async function generateDisplayName(projectName, actualProjectDir = null) {
|
||||
} catch (error) {
|
||||
// Fall back to path-based naming if package.json doesn't exist or can't be read
|
||||
}
|
||||
|
||||
|
||||
// If it starts with /, it's an absolute path
|
||||
if (projectPath.startsWith('/')) {
|
||||
const parts = projectPath.split('/').filter(Boolean);
|
||||
// Return only the last folder name
|
||||
return parts[parts.length - 1] || projectPath;
|
||||
}
|
||||
|
||||
|
||||
return projectPath;
|
||||
}
|
||||
|
||||
@@ -281,14 +282,14 @@ async function extractProjectDirectory(projectName) {
|
||||
let latestTimestamp = 0;
|
||||
let latestCwd = null;
|
||||
let extractedPath;
|
||||
|
||||
|
||||
try {
|
||||
// Check if the project directory exists
|
||||
await fs.access(projectDir);
|
||||
|
||||
|
||||
const files = await fs.readdir(projectDir);
|
||||
const jsonlFiles = files.filter(file => file.endsWith('.jsonl'));
|
||||
|
||||
|
||||
if (jsonlFiles.length === 0) {
|
||||
// Fall back to decoded project name if no sessions
|
||||
extractedPath = projectName.replace(/-/g, '/');
|
||||
@@ -301,16 +302,16 @@ async function extractProjectDirectory(projectName) {
|
||||
input: fileStream,
|
||||
crlfDelay: Infinity
|
||||
});
|
||||
|
||||
|
||||
for await (const line of rl) {
|
||||
if (line.trim()) {
|
||||
try {
|
||||
const entry = JSON.parse(line);
|
||||
|
||||
|
||||
if (entry.cwd) {
|
||||
// Count occurrences of each cwd
|
||||
cwdCounts.set(entry.cwd, (cwdCounts.get(entry.cwd) || 0) + 1);
|
||||
|
||||
|
||||
// Track the most recent cwd
|
||||
const timestamp = new Date(entry.timestamp || 0).getTime();
|
||||
if (timestamp > latestTimestamp) {
|
||||
@@ -324,7 +325,7 @@ async function extractProjectDirectory(projectName) {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Determine the best cwd to use
|
||||
if (cwdCounts.size === 0) {
|
||||
// No cwd found, fall back to decoded project name
|
||||
@@ -336,7 +337,7 @@ async function extractProjectDirectory(projectName) {
|
||||
// Multiple cwd values - prefer the most recent one if it has reasonable usage
|
||||
const mostRecentCount = cwdCounts.get(latestCwd) || 0;
|
||||
const maxCount = Math.max(...cwdCounts.values());
|
||||
|
||||
|
||||
// Use most recent if it has at least 25% of the max count
|
||||
if (mostRecentCount >= maxCount * 0.25) {
|
||||
extractedPath = latestCwd;
|
||||
@@ -349,19 +350,19 @@ async function extractProjectDirectory(projectName) {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Fallback (shouldn't reach here)
|
||||
if (!extractedPath) {
|
||||
extractedPath = latestCwd || projectName.replace(/-/g, '/');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Cache the result
|
||||
projectDirectoryCache.set(projectName, extractedPath);
|
||||
|
||||
|
||||
return extractedPath;
|
||||
|
||||
|
||||
} catch (error) {
|
||||
// If the directory doesn't exist, just use the decoded project name
|
||||
if (error.code === 'ENOENT') {
|
||||
@@ -371,10 +372,10 @@ async function extractProjectDirectory(projectName) {
|
||||
// Fall back to decoded project name for other errors
|
||||
extractedPath = projectName.replace(/-/g, '/');
|
||||
}
|
||||
|
||||
|
||||
// Cache the fallback result too
|
||||
projectDirectoryCache.set(projectName, extractedPath);
|
||||
|
||||
|
||||
return extractedPath;
|
||||
}
|
||||
}
|
||||
@@ -408,91 +409,100 @@ async function getProjects(progressCallback = null) {
|
||||
totalProjects = directories.length + manualProjectsCount;
|
||||
|
||||
for (const entry of directories) {
|
||||
processedProjects++;
|
||||
processedProjects++;
|
||||
|
||||
// Emit progress
|
||||
if (progressCallback) {
|
||||
progressCallback({
|
||||
phase: 'loading',
|
||||
current: processedProjects,
|
||||
total: totalProjects,
|
||||
currentProject: entry.name
|
||||
});
|
||||
// Emit progress
|
||||
if (progressCallback) {
|
||||
progressCallback({
|
||||
phase: 'loading',
|
||||
current: processedProjects,
|
||||
total: totalProjects,
|
||||
currentProject: entry.name
|
||||
});
|
||||
}
|
||||
|
||||
// Extract actual project directory from JSONL sessions
|
||||
const actualProjectDir = await extractProjectDirectory(entry.name);
|
||||
|
||||
// Get display name from config or generate one
|
||||
const customName = config[entry.name]?.displayName;
|
||||
const autoDisplayName = await generateDisplayName(entry.name, actualProjectDir);
|
||||
const fullPath = actualProjectDir;
|
||||
|
||||
const project = {
|
||||
name: entry.name,
|
||||
path: actualProjectDir,
|
||||
displayName: customName || autoDisplayName,
|
||||
fullPath: fullPath,
|
||||
isCustomName: !!customName,
|
||||
sessions: [],
|
||||
geminiSessions: [],
|
||||
sessionMeta: {
|
||||
hasMore: false,
|
||||
total: 0
|
||||
}
|
||||
};
|
||||
|
||||
// Extract actual project directory from JSONL sessions
|
||||
const actualProjectDir = await extractProjectDirectory(entry.name);
|
||||
|
||||
// Get display name from config or generate one
|
||||
const customName = config[entry.name]?.displayName;
|
||||
const autoDisplayName = await generateDisplayName(entry.name, actualProjectDir);
|
||||
const fullPath = actualProjectDir;
|
||||
|
||||
const project = {
|
||||
name: entry.name,
|
||||
path: actualProjectDir,
|
||||
displayName: customName || autoDisplayName,
|
||||
fullPath: fullPath,
|
||||
isCustomName: !!customName,
|
||||
sessions: [],
|
||||
sessionMeta: {
|
||||
hasMore: false,
|
||||
total: 0
|
||||
}
|
||||
// Try to get sessions for this project (just first 5 for performance)
|
||||
try {
|
||||
const sessionResult = await getSessions(entry.name, 5, 0);
|
||||
project.sessions = sessionResult.sessions || [];
|
||||
project.sessionMeta = {
|
||||
hasMore: sessionResult.hasMore,
|
||||
total: sessionResult.total
|
||||
};
|
||||
|
||||
// Try to get sessions for this project (just first 5 for performance)
|
||||
try {
|
||||
const sessionResult = await getSessions(entry.name, 5, 0);
|
||||
project.sessions = sessionResult.sessions || [];
|
||||
project.sessionMeta = {
|
||||
hasMore: sessionResult.hasMore,
|
||||
total: sessionResult.total
|
||||
};
|
||||
} catch (e) {
|
||||
console.warn(`Could not load sessions for project ${entry.name}:`, e.message);
|
||||
project.sessionMeta = {
|
||||
hasMore: false,
|
||||
total: 0
|
||||
};
|
||||
}
|
||||
|
||||
// Also fetch Cursor sessions for this project
|
||||
try {
|
||||
project.cursorSessions = await getCursorSessions(actualProjectDir);
|
||||
} catch (e) {
|
||||
console.warn(`Could not load Cursor sessions for project ${entry.name}:`, e.message);
|
||||
project.cursorSessions = [];
|
||||
}
|
||||
} catch (e) {
|
||||
console.warn(`Could not load sessions for project ${entry.name}:`, e.message);
|
||||
project.sessionMeta = {
|
||||
hasMore: false,
|
||||
total: 0
|
||||
};
|
||||
}
|
||||
|
||||
// Also fetch Codex sessions for this project
|
||||
try {
|
||||
project.codexSessions = await getCodexSessions(actualProjectDir, {
|
||||
indexRef: codexSessionsIndexRef,
|
||||
});
|
||||
} catch (e) {
|
||||
console.warn(`Could not load Codex sessions for project ${entry.name}:`, e.message);
|
||||
project.codexSessions = [];
|
||||
}
|
||||
// Also fetch Cursor sessions for this project
|
||||
try {
|
||||
project.cursorSessions = await getCursorSessions(actualProjectDir);
|
||||
} catch (e) {
|
||||
console.warn(`Could not load Cursor sessions for project ${entry.name}:`, e.message);
|
||||
project.cursorSessions = [];
|
||||
}
|
||||
|
||||
// Add TaskMaster detection
|
||||
try {
|
||||
const taskMasterResult = await detectTaskMasterFolder(actualProjectDir);
|
||||
project.taskmaster = {
|
||||
hasTaskmaster: taskMasterResult.hasTaskmaster,
|
||||
hasEssentialFiles: taskMasterResult.hasEssentialFiles,
|
||||
metadata: taskMasterResult.metadata,
|
||||
status: taskMasterResult.hasTaskmaster && taskMasterResult.hasEssentialFiles ? 'configured' : 'not-configured'
|
||||
};
|
||||
} catch (e) {
|
||||
console.warn(`Could not detect TaskMaster for project ${entry.name}:`, e.message);
|
||||
project.taskmaster = {
|
||||
hasTaskmaster: false,
|
||||
hasEssentialFiles: false,
|
||||
metadata: null,
|
||||
status: 'error'
|
||||
};
|
||||
}
|
||||
// Also fetch Codex sessions for this project
|
||||
try {
|
||||
project.codexSessions = await getCodexSessions(actualProjectDir, {
|
||||
indexRef: codexSessionsIndexRef,
|
||||
});
|
||||
} catch (e) {
|
||||
console.warn(`Could not load Codex sessions for project ${entry.name}:`, e.message);
|
||||
project.codexSessions = [];
|
||||
}
|
||||
|
||||
// Also fetch Gemini sessions for this project
|
||||
try {
|
||||
project.geminiSessions = sessionManager.getProjectSessions(actualProjectDir) || [];
|
||||
} catch (e) {
|
||||
console.warn(`Could not load Gemini sessions for project ${entry.name}:`, e.message);
|
||||
project.geminiSessions = [];
|
||||
}
|
||||
|
||||
// Add TaskMaster detection
|
||||
try {
|
||||
const taskMasterResult = await detectTaskMasterFolder(actualProjectDir);
|
||||
project.taskmaster = {
|
||||
hasTaskmaster: taskMasterResult.hasTaskmaster,
|
||||
hasEssentialFiles: taskMasterResult.hasEssentialFiles,
|
||||
metadata: taskMasterResult.metadata,
|
||||
status: taskMasterResult.hasTaskmaster && taskMasterResult.hasEssentialFiles ? 'configured' : 'not-configured'
|
||||
};
|
||||
} catch (e) {
|
||||
console.warn(`Could not detect TaskMaster for project ${entry.name}:`, e.message);
|
||||
project.taskmaster = {
|
||||
hasTaskmaster: false,
|
||||
hasEssentialFiles: false,
|
||||
metadata: null,
|
||||
status: 'error'
|
||||
};
|
||||
}
|
||||
|
||||
projects.push(project);
|
||||
}
|
||||
@@ -506,7 +516,7 @@ async function getProjects(progressCallback = null) {
|
||||
.filter(([name, cfg]) => cfg.manuallyAdded)
|
||||
.length;
|
||||
}
|
||||
|
||||
|
||||
// Add manually configured projects that don't exist as folders yet
|
||||
for (const [projectName, projectConfig] of Object.entries(config)) {
|
||||
if (!existingProjects.has(projectName) && projectConfig.manuallyAdded) {
|
||||
@@ -524,7 +534,7 @@ async function getProjects(progressCallback = null) {
|
||||
|
||||
// Use the original path if available, otherwise extract from potential sessions
|
||||
let actualProjectDir = projectConfig.originalPath;
|
||||
|
||||
|
||||
if (!actualProjectDir) {
|
||||
try {
|
||||
actualProjectDir = await extractProjectDirectory(projectName);
|
||||
@@ -533,21 +543,22 @@ async function getProjects(progressCallback = null) {
|
||||
actualProjectDir = projectName.replace(/-/g, '/');
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
const project = {
|
||||
name: projectName,
|
||||
path: actualProjectDir,
|
||||
displayName: projectConfig.displayName || await generateDisplayName(projectName, actualProjectDir),
|
||||
fullPath: actualProjectDir,
|
||||
isCustomName: !!projectConfig.displayName,
|
||||
isManuallyAdded: true,
|
||||
sessions: [],
|
||||
sessionMeta: {
|
||||
hasMore: false,
|
||||
total: 0
|
||||
},
|
||||
cursorSessions: [],
|
||||
codexSessions: []
|
||||
name: projectName,
|
||||
path: actualProjectDir,
|
||||
displayName: projectConfig.displayName || await generateDisplayName(projectName, actualProjectDir),
|
||||
fullPath: actualProjectDir,
|
||||
isCustomName: !!projectConfig.displayName,
|
||||
isManuallyAdded: true,
|
||||
sessions: [],
|
||||
geminiSessions: [],
|
||||
sessionMeta: {
|
||||
hasMore: false,
|
||||
total: 0
|
||||
},
|
||||
cursorSessions: [],
|
||||
codexSessions: []
|
||||
};
|
||||
|
||||
// Try to fetch Cursor sessions for manual projects too
|
||||
@@ -566,16 +577,23 @@ async function getProjects(progressCallback = null) {
|
||||
console.warn(`Could not load Codex sessions for manual project ${projectName}:`, e.message);
|
||||
}
|
||||
|
||||
// Try to fetch Gemini sessions for manual projects too
|
||||
try {
|
||||
project.geminiSessions = sessionManager.getProjectSessions(actualProjectDir) || [];
|
||||
} catch (e) {
|
||||
console.warn(`Could not load Gemini sessions for manual project ${projectName}:`, e.message);
|
||||
}
|
||||
|
||||
// Add TaskMaster detection for manual projects
|
||||
try {
|
||||
const taskMasterResult = await detectTaskMasterFolder(actualProjectDir);
|
||||
|
||||
|
||||
// Determine TaskMaster status
|
||||
let taskMasterStatus = 'not-configured';
|
||||
if (taskMasterResult.hasTaskmaster && taskMasterResult.hasEssentialFiles) {
|
||||
taskMasterStatus = 'taskmaster-only'; // We don't check MCP for manual projects in bulk
|
||||
}
|
||||
|
||||
|
||||
project.taskmaster = {
|
||||
status: taskMasterStatus,
|
||||
hasTaskmaster: taskMasterResult.hasTaskmaster,
|
||||
@@ -591,7 +609,7 @@ async function getProjects(progressCallback = null) {
|
||||
error: error.message
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
projects.push(project);
|
||||
}
|
||||
}
|
||||
@@ -616,11 +634,11 @@ async function getSessions(projectName, limit = 5, offset = 0) {
|
||||
// agent-*.jsonl files contain session start data at this point. This needs to be revisited
|
||||
// periodically to make sure only accurate data is there and no new functionality is added there
|
||||
const jsonlFiles = files.filter(file => file.endsWith('.jsonl') && !file.startsWith('agent-'));
|
||||
|
||||
|
||||
if (jsonlFiles.length === 0) {
|
||||
return { sessions: [], hasMore: false, total: 0 };
|
||||
}
|
||||
|
||||
|
||||
// Sort files by modification time (newest first)
|
||||
const filesWithStats = await Promise.all(
|
||||
jsonlFiles.map(async (file) => {
|
||||
@@ -630,37 +648,37 @@ async function getSessions(projectName, limit = 5, offset = 0) {
|
||||
})
|
||||
);
|
||||
filesWithStats.sort((a, b) => b.mtime - a.mtime);
|
||||
|
||||
|
||||
const allSessions = new Map();
|
||||
const allEntries = [];
|
||||
const uuidToSessionMap = new Map();
|
||||
|
||||
|
||||
// Collect all sessions and entries from all files
|
||||
for (const { file } of filesWithStats) {
|
||||
const jsonlFile = path.join(projectDir, file);
|
||||
const result = await parseJsonlSessions(jsonlFile);
|
||||
|
||||
|
||||
result.sessions.forEach(session => {
|
||||
if (!allSessions.has(session.id)) {
|
||||
allSessions.set(session.id, session);
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
allEntries.push(...result.entries);
|
||||
|
||||
|
||||
// Early exit optimization for large projects
|
||||
if (allSessions.size >= (limit + offset) * 2 && allEntries.length >= Math.min(3, filesWithStats.length)) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Build UUID-to-session mapping for timeline detection
|
||||
allEntries.forEach(entry => {
|
||||
if (entry.uuid && entry.sessionId) {
|
||||
uuidToSessionMap.set(entry.uuid, entry.sessionId);
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
// Group sessions by first user message ID
|
||||
const sessionGroups = new Map(); // firstUserMsgId -> { latestSession, allSessions[] }
|
||||
const sessionToFirstUserMsgId = new Map(); // sessionId -> firstUserMsgId
|
||||
@@ -722,7 +740,7 @@ async function getSessions(projectName, limit = 5, offset = 0) {
|
||||
const total = visibleSessions.length;
|
||||
const paginatedSessions = visibleSessions.slice(offset, offset + limit);
|
||||
const hasMore = offset + limit < total;
|
||||
|
||||
|
||||
return {
|
||||
sessions: paginatedSessions,
|
||||
hasMore,
|
||||
@@ -926,8 +944,8 @@ async function parseAgentTools(filePath) {
|
||||
if (tool) {
|
||||
tool.toolResult = {
|
||||
content: typeof part.content === 'string' ? part.content :
|
||||
Array.isArray(part.content) ? part.content.map(c => c.text || '').join('\n') :
|
||||
JSON.stringify(part.content),
|
||||
Array.isArray(part.content) ? part.content.map(c => c.text || '').join('\n') :
|
||||
JSON.stringify(part.content),
|
||||
isError: Boolean(part.is_error)
|
||||
};
|
||||
}
|
||||
@@ -1015,7 +1033,6 @@ async function getSessionMessages(projectName, sessionId, limit = null, offset =
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Sort messages by timestamp
|
||||
const sortedMessages = messages.sort((a, b) =>
|
||||
new Date(a.timestamp || 0) - new Date(b.timestamp || 0)
|
||||
@@ -1051,7 +1068,7 @@ async function getSessionMessages(projectName, sessionId, limit = null, offset =
|
||||
// Rename a project's display name
|
||||
async function renameProject(projectName, newDisplayName) {
|
||||
const config = await loadProjectConfig();
|
||||
|
||||
|
||||
if (!newDisplayName || newDisplayName.trim() === '') {
|
||||
// Remove custom name if empty, will fall back to auto-generated
|
||||
delete config[projectName];
|
||||
@@ -1061,7 +1078,7 @@ async function renameProject(projectName, newDisplayName) {
|
||||
displayName: newDisplayName.trim()
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
await saveProjectConfig(config);
|
||||
return true;
|
||||
}
|
||||
@@ -1069,21 +1086,21 @@ async function renameProject(projectName, newDisplayName) {
|
||||
// Delete a session from a project
|
||||
async function deleteSession(projectName, sessionId) {
|
||||
const projectDir = path.join(os.homedir(), '.claude', 'projects', projectName);
|
||||
|
||||
|
||||
try {
|
||||
const files = await fs.readdir(projectDir);
|
||||
const jsonlFiles = files.filter(file => file.endsWith('.jsonl'));
|
||||
|
||||
|
||||
if (jsonlFiles.length === 0) {
|
||||
throw new Error('No session files found for this project');
|
||||
}
|
||||
|
||||
|
||||
// Check all JSONL files to find which one contains the session
|
||||
for (const file of jsonlFiles) {
|
||||
const jsonlFile = path.join(projectDir, file);
|
||||
const content = await fs.readFile(jsonlFile, 'utf8');
|
||||
const lines = content.split('\n').filter(line => line.trim());
|
||||
|
||||
|
||||
// Check if this file contains the session
|
||||
const hasSession = lines.some(line => {
|
||||
try {
|
||||
@@ -1093,7 +1110,7 @@ async function deleteSession(projectName, sessionId) {
|
||||
return false;
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
if (hasSession) {
|
||||
// Filter out all entries for this session
|
||||
const filteredLines = lines.filter(line => {
|
||||
@@ -1104,13 +1121,13 @@ async function deleteSession(projectName, sessionId) {
|
||||
return true; // Keep malformed lines
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
// Write back the filtered content
|
||||
await fs.writeFile(jsonlFile, filteredLines.join('\n') + (filteredLines.length > 0 ? '\n' : ''));
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
throw new Error(`Session ${sessionId} not found in any files`);
|
||||
} catch (error) {
|
||||
console.error(`Error deleting session ${sessionId} from project ${projectName}:`, error);
|
||||
@@ -1220,10 +1237,10 @@ async function addProjectManually(projectPath, displayName = null) {
|
||||
if (displayName) {
|
||||
config[projectName].displayName = displayName;
|
||||
}
|
||||
|
||||
|
||||
await saveProjectConfig(config);
|
||||
|
||||
|
||||
|
||||
|
||||
return {
|
||||
name: projectName,
|
||||
path: absolutePath,
|
||||
@@ -1241,7 +1258,7 @@ async function getCursorSessions(projectPath) {
|
||||
// Calculate cwdID hash for the project path (Cursor uses MD5 hash)
|
||||
const cwdId = crypto.createHash('md5').update(projectPath).digest('hex');
|
||||
const cursorChatsPath = path.join(os.homedir(), '.cursor', 'chats', cwdId);
|
||||
|
||||
|
||||
// Check if the directory exists
|
||||
try {
|
||||
await fs.access(cursorChatsPath);
|
||||
@@ -1249,25 +1266,25 @@ async function getCursorSessions(projectPath) {
|
||||
// No sessions for this project
|
||||
return [];
|
||||
}
|
||||
|
||||
|
||||
// List all session directories
|
||||
const sessionDirs = await fs.readdir(cursorChatsPath);
|
||||
const sessions = [];
|
||||
|
||||
|
||||
for (const sessionId of sessionDirs) {
|
||||
const sessionPath = path.join(cursorChatsPath, sessionId);
|
||||
const storeDbPath = path.join(sessionPath, 'store.db');
|
||||
|
||||
|
||||
try {
|
||||
// Check if store.db exists
|
||||
await fs.access(storeDbPath);
|
||||
|
||||
|
||||
// Capture store.db mtime as a reliable fallback timestamp
|
||||
let dbStatMtimeMs = null;
|
||||
try {
|
||||
const stat = await fs.stat(storeDbPath);
|
||||
dbStatMtimeMs = stat.mtimeMs;
|
||||
} catch (_) {}
|
||||
} catch (_) { }
|
||||
|
||||
// Open SQLite database
|
||||
const db = await open({
|
||||
@@ -1275,12 +1292,12 @@ async function getCursorSessions(projectPath) {
|
||||
driver: sqlite3.Database,
|
||||
mode: sqlite3.OPEN_READONLY
|
||||
});
|
||||
|
||||
|
||||
// Get metadata from meta table
|
||||
const metaRows = await db.all(`
|
||||
SELECT key, value FROM meta
|
||||
`);
|
||||
|
||||
|
||||
// Parse metadata
|
||||
let metadata = {};
|
||||
for (const row of metaRows) {
|
||||
@@ -1299,17 +1316,17 @@ async function getCursorSessions(projectPath) {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Get message count
|
||||
const messageCountResult = await db.get(`
|
||||
SELECT COUNT(*) as count FROM blobs
|
||||
`);
|
||||
|
||||
|
||||
await db.close();
|
||||
|
||||
|
||||
// Extract session info
|
||||
const sessionName = metadata.title || metadata.sessionTitle || 'Untitled Session';
|
||||
|
||||
|
||||
// Determine timestamp - prefer createdAt from metadata, fall back to db file mtime
|
||||
let createdAt = null;
|
||||
if (metadata.createdAt) {
|
||||
@@ -1319,7 +1336,7 @@ async function getCursorSessions(projectPath) {
|
||||
} else {
|
||||
createdAt = new Date().toISOString();
|
||||
}
|
||||
|
||||
|
||||
sessions.push({
|
||||
id: sessionId,
|
||||
name: sessionName,
|
||||
@@ -1328,18 +1345,18 @@ async function getCursorSessions(projectPath) {
|
||||
messageCount: messageCountResult.count || 0,
|
||||
projectPath: projectPath
|
||||
});
|
||||
|
||||
|
||||
} catch (error) {
|
||||
console.warn(`Could not read Cursor session ${sessionId}:`, error.message);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Sort sessions by creation time (newest first)
|
||||
sessions.sort((a, b) => new Date(b.createdAt) - new Date(a.createdAt));
|
||||
|
||||
|
||||
// Return only the first 5 sessions for performance
|
||||
return sessions.slice(0, 5);
|
||||
|
||||
|
||||
} catch (error) {
|
||||
console.error('Error fetching Cursor sessions:', error);
|
||||
return [];
|
||||
@@ -1785,7 +1802,7 @@ async function deleteCodexSession(sessionId) {
|
||||
files.push(fullPath);
|
||||
}
|
||||
}
|
||||
} catch (error) {}
|
||||
} catch (error) { }
|
||||
return files;
|
||||
};
|
||||
|
||||
|
||||
@@ -9,6 +9,7 @@ import { addProjectManually } from '../projects.js';
|
||||
import { queryClaudeSDK } from '../claude-sdk.js';
|
||||
import { spawnCursor } from '../cursor-cli.js';
|
||||
import { queryCodex } from '../openai-codex.js';
|
||||
import { spawnGemini } from '../gemini-cli.js';
|
||||
import { Octokit } from '@octokit/rest';
|
||||
import { CLAUDE_MODELS, CURSOR_MODELS, CODEX_MODELS } from '../../shared/modelConstants.js';
|
||||
import { IS_PLATFORM } from '../constants/config.js';
|
||||
@@ -629,7 +630,7 @@ class ResponseCollector {
|
||||
* - Source for auto-generated branch names (if createBranch=true and no branchName)
|
||||
* - Fallback for PR title if no commits are made
|
||||
*
|
||||
* @param {string} provider - (Optional) AI provider to use. Options: 'claude' | 'cursor'
|
||||
* @param {string} provider - (Optional) AI provider to use. Options: 'claude' | 'cursor' | 'codex' | 'gemini'
|
||||
* Default: 'claude'
|
||||
*
|
||||
* @param {boolean} stream - (Optional) Enable Server-Sent Events (SSE) streaming for real-time updates.
|
||||
@@ -747,7 +748,7 @@ class ResponseCollector {
|
||||
* Input Validations (400 Bad Request):
|
||||
* - Either githubUrl OR projectPath must be provided (not neither)
|
||||
* - message must be non-empty string
|
||||
* - provider must be 'claude' or 'cursor'
|
||||
* - provider must be 'claude', 'cursor', 'codex', or 'gemini'
|
||||
* - createBranch/createPR requires githubUrl OR projectPath (not neither)
|
||||
* - branchName must pass Git naming rules (if provided)
|
||||
*
|
||||
@@ -855,8 +856,8 @@ router.post('/', validateExternalApiKey, async (req, res) => {
|
||||
return res.status(400).json({ error: 'message is required' });
|
||||
}
|
||||
|
||||
if (!['claude', 'cursor', 'codex'].includes(provider)) {
|
||||
return res.status(400).json({ error: 'provider must be "claude", "cursor", or "codex"' });
|
||||
if (!['claude', 'cursor', 'codex', 'gemini'].includes(provider)) {
|
||||
return res.status(400).json({ error: 'provider must be "claude", "cursor", "codex", or "gemini"' });
|
||||
}
|
||||
|
||||
// Validate GitHub branch/PR creation requirements
|
||||
@@ -971,6 +972,16 @@ router.post('/', validateExternalApiKey, async (req, res) => {
|
||||
model: model || CODEX_MODELS.DEFAULT,
|
||||
permissionMode: 'bypassPermissions'
|
||||
}, writer);
|
||||
} else if (provider === 'gemini') {
|
||||
console.log('✨ Starting Gemini CLI session');
|
||||
|
||||
await spawnGemini(message.trim(), {
|
||||
projectPath: finalProjectPath,
|
||||
cwd: finalProjectPath,
|
||||
sessionId: null,
|
||||
model: model,
|
||||
skipPermissions: true // CLI mode bypasses permissions
|
||||
}, writer);
|
||||
}
|
||||
|
||||
// Handle GitHub branch and PR creation after successful agent completion
|
||||
|
||||
@@ -74,6 +74,46 @@ router.get('/codex/status', async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
router.get('/gemini/status', async (req, res) => {
|
||||
try {
|
||||
const result = await checkGeminiCredentials();
|
||||
|
||||
res.json({
|
||||
authenticated: result.authenticated,
|
||||
email: result.email,
|
||||
error: result.error
|
||||
});
|
||||
|
||||
} catch (error) {
|
||||
console.error('Error checking Gemini auth status:', error);
|
||||
res.status(500).json({
|
||||
authenticated: false,
|
||||
email: null,
|
||||
error: error.message
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Checks Claude authentication credentials using two methods with priority order:
|
||||
*
|
||||
* Priority 1: ANTHROPIC_API_KEY environment variable
|
||||
* Priority 2: ~/.claude/.credentials.json OAuth tokens
|
||||
*
|
||||
* The Claude Agent SDK prioritizes environment variables over authenticated subscriptions.
|
||||
* This matching behavior ensures consistency with how the SDK authenticates.
|
||||
*
|
||||
* References:
|
||||
* - https://support.claude.com/en/articles/12304248-managing-api-key-environment-variables-in-claude-code
|
||||
* "Claude Code prioritizes environment variable API keys over authenticated subscriptions"
|
||||
* - https://platform.claude.com/docs/en/agent-sdk/overview
|
||||
* SDK authentication documentation
|
||||
*
|
||||
* @returns {Promise<Object>} Authentication status with { authenticated, email, method }
|
||||
* - authenticated: boolean indicating if valid credentials exist
|
||||
* - email: user email or auth method identifier
|
||||
* - method: 'api_key' for env var, 'credentials_file' for OAuth tokens
|
||||
*/
|
||||
async function checkClaudeCredentials() {
|
||||
try {
|
||||
const credPath = path.join(os.homedir(), '.claude', '.credentials.json');
|
||||
@@ -260,4 +300,78 @@ async function checkCodexCredentials() {
|
||||
}
|
||||
}
|
||||
|
||||
async function checkGeminiCredentials() {
|
||||
if (process.env.GEMINI_API_KEY && process.env.GEMINI_API_KEY.trim()) {
|
||||
return {
|
||||
authenticated: true,
|
||||
email: 'API Key Auth'
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
const credsPath = path.join(os.homedir(), '.gemini', 'oauth_creds.json');
|
||||
const content = await fs.readFile(credsPath, 'utf8');
|
||||
const creds = JSON.parse(content);
|
||||
|
||||
if (creds.access_token) {
|
||||
let email = 'OAuth Session';
|
||||
|
||||
try {
|
||||
// Validate token against Google API
|
||||
const tokenRes = await fetch(`https://oauth2.googleapis.com/tokeninfo?access_token=${creds.access_token}`);
|
||||
if (tokenRes.ok) {
|
||||
const tokenInfo = await tokenRes.json();
|
||||
if (tokenInfo.email) {
|
||||
email = tokenInfo.email;
|
||||
}
|
||||
} else if (!creds.refresh_token) {
|
||||
// Token invalid and no refresh token available
|
||||
return {
|
||||
authenticated: false,
|
||||
email: null,
|
||||
error: 'Access token invalid and no refresh token found'
|
||||
};
|
||||
} else {
|
||||
// Token might be expired but we have a refresh token, so CLI will refresh it
|
||||
try {
|
||||
const accPath = path.join(os.homedir(), '.gemini', 'google_accounts.json');
|
||||
const accContent = await fs.readFile(accPath, 'utf8');
|
||||
const accounts = JSON.parse(accContent);
|
||||
if (accounts.active) {
|
||||
email = accounts.active;
|
||||
}
|
||||
} catch (e) { }
|
||||
}
|
||||
} catch (e) {
|
||||
// Network error, fallback to checking local accounts file
|
||||
try {
|
||||
const accPath = path.join(os.homedir(), '.gemini', 'google_accounts.json');
|
||||
const accContent = await fs.readFile(accPath, 'utf8');
|
||||
const accounts = JSON.parse(accContent);
|
||||
if (accounts.active) {
|
||||
email = accounts.active;
|
||||
}
|
||||
} catch (err) { }
|
||||
}
|
||||
|
||||
return {
|
||||
authenticated: true,
|
||||
email: email
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
authenticated: false,
|
||||
email: null,
|
||||
error: 'No valid tokens found in oauth_creds'
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
authenticated: false,
|
||||
email: null,
|
||||
error: 'Gemini CLI not configured'
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export default router;
|
||||
|
||||
46
server/routes/gemini.js
Normal file
46
server/routes/gemini.js
Normal file
@@ -0,0 +1,46 @@
|
||||
import express from 'express';
|
||||
import sessionManager from '../sessionManager.js';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
router.get('/sessions/:sessionId/messages', async (req, res) => {
|
||||
try {
|
||||
const { sessionId } = req.params;
|
||||
|
||||
if (!sessionId || typeof sessionId !== 'string' || !/^[a-zA-Z0-9_.-]{1,100}$/.test(sessionId)) {
|
||||
return res.status(400).json({ success: false, error: 'Invalid session ID format' });
|
||||
}
|
||||
|
||||
const messages = sessionManager.getSessionMessages(sessionId);
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
messages: messages,
|
||||
total: messages.length,
|
||||
hasMore: false,
|
||||
offset: 0,
|
||||
limit: messages.length
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('Error fetching Gemini session messages:', error);
|
||||
res.status(500).json({ success: false, error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
router.delete('/sessions/:sessionId', async (req, res) => {
|
||||
try {
|
||||
const { sessionId } = req.params;
|
||||
|
||||
if (!sessionId || typeof sessionId !== 'string' || !/^[a-zA-Z0-9_.-]{1,100}$/.test(sessionId)) {
|
||||
return res.status(400).json({ success: false, error: 'Invalid session ID format' });
|
||||
}
|
||||
|
||||
await sessionManager.deleteSession(sessionId);
|
||||
res.json({ success: true });
|
||||
} catch (error) {
|
||||
console.error(`Error deleting Gemini session ${req.params.sessionId}:`, error);
|
||||
res.status(500).json({ success: false, error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
export default router;
|
||||
226
server/sessionManager.js
Normal file
226
server/sessionManager.js
Normal file
@@ -0,0 +1,226 @@
|
||||
import { promises as fs } from 'fs';
|
||||
import path from 'path';
|
||||
import os from 'os';
|
||||
|
||||
/**
 * SessionManager keeps Gemini chat sessions in memory (a Map keyed by
 * session id) and mirrors each session to a JSON file under
 * ~/.gemini/sessions so history survives server restarts.
 *
 * Capacity is capped at `maxSessions`; when the cap is reached, the
 * session with the oldest `lastActivity` is evicted from memory (its
 * on-disk file is left in place).
 */
class SessionManager {
  constructor() {
    // sessionId -> { id, projectPath, messages, createdAt, lastActivity }
    this.sessions = new Map();
    this.maxSessions = 100;
    this.sessionsDir = path.join(os.homedir(), '.gemini', 'sessions');
    // Callers can `await ready` to know the dir exists and disk sessions are loaded.
    this.ready = this.init();
  }

  async init() {
    await this.initSessionsDir();
    await this.loadSessions();
  }

  // Create the persistence directory if needed. Best-effort: failure only
  // means sessions won't persist, so the error is deliberately ignored.
  async initSessionsDir() {
    try {
      await fs.mkdir(this.sessionsDir, { recursive: true });
    } catch (error) {
      // Persistence is optional; in-memory operation continues.
    }
  }

  // Remove the least-recently-active session from memory.
  // (The previous implementation evicted in Map insertion order, which
  // could drop a busy session while keeping idle ones.)
  _evictLeastRecentlyActive() {
    let oldestId = null;
    let oldestTime = Infinity;
    for (const [id, session] of this.sessions) {
      const t = new Date(session.lastActivity).getTime();
      if (t < oldestTime) {
        oldestTime = t;
        oldestId = id;
      }
    }
    if (oldestId !== null) {
      this.sessions.delete(oldestId);
    }
  }

  /**
   * Create (or overwrite) a session and persist it.
   * @param {string} sessionId
   * @param {string} projectPath - project the session belongs to; may be ''.
   * @returns {object} the new session record
   */
  createSession(sessionId, projectPath) {
    const session = {
      id: sessionId,
      projectPath: projectPath,
      messages: [],
      createdAt: new Date(),
      lastActivity: new Date()
    };

    // Enforce the memory cap before inserting.
    if (this.sessions.size >= this.maxSessions) {
      this._evictLeastRecentlyActive();
    }

    this.sessions.set(sessionId, session);
    // Fire-and-forget: saveSession catches its own errors and never rejects.
    this.saveSession(sessionId);

    return session;
  }

  /**
   * Append a message to a session, creating the session on the fly (with an
   * empty projectPath) if it does not exist.
   * @param {string} sessionId
   * @param {string} role - 'user' or 'assistant'
   * @param {string} content
   * @returns {object} the updated session
   */
  addMessage(sessionId, role, content) {
    let session = this.sessions.get(sessionId);

    if (!session) {
      session = this.createSession(sessionId, '');
    }

    session.messages.push({
      role: role, // 'user' or 'assistant'
      content: content,
      timestamp: new Date()
    });
    session.lastActivity = new Date();

    // Fire-and-forget persistence (saveSession never rejects).
    this.saveSession(sessionId);

    return session;
  }

  // Look up a session by id; returns undefined when absent or evicted.
  getSession(sessionId) {
    return this.sessions.get(sessionId);
  }

  /**
   * List lightweight summaries of all in-memory sessions for a project,
   * most recently active first.
   * @param {string} projectPath
   * @returns {Array<{id, summary, messageCount, lastActivity}>}
   */
  getProjectSessions(projectPath) {
    const sessions = [];

    for (const session of this.sessions.values()) {
      if (session.projectPath === projectPath) {
        sessions.push({
          id: session.id,
          summary: this.getSessionSummary(session),
          messageCount: session.messages.length,
          lastActivity: session.lastActivity
        });
      }
    }

    return sessions.sort(
      (a, b) => new Date(b.lastActivity) - new Date(a.lastActivity)
    );
  }

  // Derive a short human-readable title: the first user message truncated
  // to 50 characters, or 'New Session' when there is none.
  getSessionSummary(session) {
    const firstUserMessage = session.messages.find((m) => m.role === 'user');
    if (!firstUserMessage) {
      return 'New Session';
    }

    const content = firstUserMessage.content;
    return content.length > 50 ? content.substring(0, 50) + '...' : content;
  }

  /**
   * Build a plain-text transcript of the last `maxMessages` messages,
   * suitable for prepending to a Gemini prompt.
   * @param {string} sessionId
   * @param {number} [maxMessages=10]
   * @returns {string} '' when the session is unknown or empty
   */
  buildConversationContext(sessionId, maxMessages = 10) {
    const session = this.sessions.get(sessionId);

    if (!session || session.messages.length === 0) {
      return '';
    }

    const recentMessages = session.messages.slice(-maxMessages);

    let context = 'Here is the conversation history:\n\n';

    for (const msg of recentMessages) {
      if (msg.role === 'user') {
        context += `User: ${msg.content}\n`;
      } else {
        context += `Assistant: ${msg.content}\n`;
      }
    }

    context += '\nBased on the conversation history above, please answer the following:\n';

    return context;
  }

  // Map a session id to its on-disk path, stripping path separators and
  // '..' sequences so a hostile id cannot escape sessionsDir.
  _safeFilePath(sessionId) {
    const safeId = String(sessionId).replace(/[/\\]|\.\./g, '');
    return path.join(this.sessionsDir, `${safeId}.json`);
  }

  // Persist one session as pretty-printed JSON. Never rejects: persistence
  // is best-effort and failures are swallowed so callers can fire-and-forget.
  async saveSession(sessionId) {
    const session = this.sessions.get(sessionId);
    if (!session) return;

    try {
      const filePath = this._safeFilePath(sessionId);
      await fs.writeFile(filePath, JSON.stringify(session, null, 2));
    } catch (error) {
      // Best-effort persistence; the in-memory copy remains authoritative.
    }
  }

  // Load every *.json session file from disk into memory, reviving Date
  // fields, then trim to `maxSessions` by dropping the least-recently-active
  // (instead of the arbitrary readdir order used previously).
  async loadSessions() {
    try {
      const files = await fs.readdir(this.sessionsDir);

      for (const file of files) {
        if (!file.endsWith('.json')) continue;

        try {
          const filePath = path.join(this.sessionsDir, file);
          const data = await fs.readFile(filePath, 'utf8');
          const session = JSON.parse(data);

          // JSON.parse leaves dates as strings; revive them.
          session.createdAt = new Date(session.createdAt);
          session.lastActivity = new Date(session.lastActivity);
          session.messages.forEach((msg) => {
            msg.timestamp = new Date(msg.timestamp);
          });

          this.sessions.set(session.id, session);
        } catch (error) {
          // Skip unreadable or corrupt session files.
        }
      }

      // Enforce eviction after loading to prevent massive memory usage.
      while (this.sessions.size > this.maxSessions) {
        this._evictLeastRecentlyActive();
      }
    } catch (error) {
      // Directory missing or unreadable: start with no persisted sessions.
    }
  }

  // Remove a session from memory and delete its file (best-effort: the
  // file may never have been written).
  async deleteSession(sessionId) {
    this.sessions.delete(sessionId);

    try {
      const filePath = this._safeFilePath(sessionId);
      await fs.unlink(filePath);
    } catch (error) {
      // Deletion stays best-effort.
    }
  }

  /**
   * Return a session's messages in the wire shape the chat UI expects:
   * { type: 'message', message: { role, content }, timestamp: ISO-8601 }.
   * Unknown sessions yield [].
   */
  getSessionMessages(sessionId) {
    const session = this.sessions.get(sessionId);
    if (!session) return [];

    return session.messages.map((msg) => ({
      type: 'message',
      message: {
        role: msg.role,
        content: msg.content
      },
      timestamp: msg.timestamp.toISOString()
    }));
  }
}
|
||||
|
||||
// Singleton instance
// Shared module-wide so every route/handler operates on the same
// in-memory session map.
const sessionManager = new SessionManager();

// Resolves once the sessions directory exists and persisted sessions
// have been loaded from disk; callers may await this before serving.
export const ready = sessionManager.ready;
export default sessionManager;
|
||||
Reference in New Issue
Block a user