From 60c8bda7552021541d6147c4fea85b73c6a30cf6 Mon Sep 17 00:00:00 2001
From: simosmik
Date: Mon, 29 Dec 2025 16:19:09 +0000
Subject: [PATCH] fix: pass model parameter to Claude and Codex SDKs

Previously, the model parameter was accepted by the /api/agent endpoint and
extracted from requests, but it was never passed through to the Claude SDK or
the Codex SDK, so all requests used the default models regardless of the
user's selection.

Changes:
- Add model parameter to queryClaudeSDK() options in routes/agent.js
- Add model to threadOptions in openai-codex.js
- Remove the unused /cost slash command and its hard-coded pricing rates
- Centralize all model definitions in shared/modelConstants.js
- Update API documentation to dynamically load models from constants
---
 public/api-docs.html             | 38 +++++++++++++++----
 server/claude-sdk.js             |  3 +-
 server/openai-codex.js           |  3 +-
 server/routes/agent.js           | 15 ++++++--
 server/routes/commands.js        | 63 +++----------------------------
 server/routes/cursor.js          |  3 +-
 shared/modelConstants.js         | 65 ++++++++++++++++++++++++++++++++
 src/components/ChatInterface.jsx | 55 ++++++++------------------
 8 files changed, 134 insertions(+), 111 deletions(-)
 create mode 100644 shared/modelConstants.js

diff --git a/public/api-docs.html b/public/api-docs.html
index fbb05fc..ec671ec 100644
--- a/public/api-docs.html
+++ b/public/api-docs.html
@@ -489,7 +489,7 @@
 http://localhost:3001/api/agent
 
-Trigger an AI agent (Claude or Cursor) to work on a project.
+Trigger an AI agent (Claude, Cursor, or Codex) to work on a project.
 
 Request Body Parameters
 
@@ -524,7 +524,7 @@
 provider
 string
 Optional
-claude or cursor (default: claude)
+claude, cursor, or codex (default: claude)
@@ -536,7 +536,9 @@
 stream
 model
 string
 Optional
-Model to use (for Cursor)
+Model identifier for the AI provider (loading from constants...)
+
@@ -818,31 +820,51 @@
 data: {"type":"done"}
 [rest of this hunk (the markup that loads model options from the shared constants) was lost in extraction]
diff --git a/server/claude-sdk.js b/server/claude-sdk.js
index 05b6df1..67ba103 100644
--- a/server/claude-sdk.js
+++ b/server/claude-sdk.js
@@ -16,6 +16,7 @@ import { query } from '@anthropic-ai/claude-agent-sdk';
 import { promises as fs } from 'fs';
 import path from 'path';
 import os from 'os';
+import { CLAUDE_MODELS } from '../shared/modelConstants.js';
 
 // Session tracking: Map of session IDs to active query instances
 const activeSessions = new Map();
@@ -77,7 +78,7 @@ function mapCliOptionsToSDK(options = {}) {
   // Map model (default to sonnet)
   // Valid models: sonnet, opus, haiku, opusplan, sonnet[1m]
-  sdkOptions.model = options.model || 'sonnet';
+  sdkOptions.model = options.model || CLAUDE_MODELS.DEFAULT;
   console.log(`Using model: ${sdkOptions.model}`);
 
   // Map system prompt configuration
diff --git a/server/openai-codex.js b/server/openai-codex.js
index d6a5b43..f4f00ef 100644
--- a/server/openai-codex.js
+++ b/server/openai-codex.js
@@ -213,7 +213,8 @@ export async function queryCodex(command, options = {}, ws) {
     workingDirectory,
     skipGitRepoCheck: true,
     sandboxMode,
-    approvalPolicy
+    approvalPolicy,
+    model
   };
 
   // Start or resume thread
diff --git a/server/routes/agent.js b/server/routes/agent.js
index 646b875..04e6339 100644
--- a/server/routes/agent.js
+++ b/server/routes/agent.js
@@ -10,6 +10,7 @@ import { queryClaudeSDK } from '../claude-sdk.js';
 import { spawnCursor } from '../cursor-cli.js';
 import { queryCodex } from '../openai-codex.js';
 import { Octokit } from '@octokit/rest';
+import { CLAUDE_MODELS, CURSOR_MODELS, CODEX_MODELS } from '../../shared/modelConstants.js';
 
 const router = express.Router();
@@ -634,9 +635,14 @@ class ResponseCollector {
  *   - true: Returns text/event-stream with incremental updates
  *   - false: Returns complete JSON response after completion
  *
- * @param {string} model - (Optional) Model identifier for Cursor provider.
- *   Only applicable when provider='cursor'.
- *   Examples: 'gpt-4', 'claude-3-opus', etc.
+ * @param {string} model - (Optional) Model identifier for the selected provider.
+ *
+ *   Claude models: 'sonnet' (default), 'opus', 'haiku', 'opusplan', 'sonnet[1m]'
+ *   Cursor models: 'gpt-5' (default), 'gpt-5.2', 'gpt-5.2-high', 'sonnet-4.5', 'opus-4.5',
+ *                  'gemini-3-pro', 'composer-1', 'auto', 'gpt-5.1', 'gpt-5.1-high',
+ *                  'gpt-5.1-codex', 'gpt-5.1-codex-high', 'gpt-5.1-codex-max',
+ *                  'gpt-5.1-codex-max-high', 'opus-4.1', 'grok', and thinking variants
+ *   Codex models: 'gpt-5.2' (default), 'gpt-5.1-codex-max', 'o3', 'o4-mini'
 *
 * @param {boolean} cleanup - (Optional) Auto-cleanup project directory after completion.
 *   Default: true
@@ -939,6 +945,7 @@ router.post('/', validateExternalApiKey, async (req, res) => {
         projectPath: finalProjectPath,
         cwd: finalProjectPath,
         sessionId: null, // New session
+        model: model,
         permissionMode: 'bypassPermissions' // Bypass all permissions for API calls
       }, writer);
 
@@ -959,7 +966,7 @@ router.post('/', validateExternalApiKey, async (req, res) => {
         projectPath: finalProjectPath,
         cwd: finalProjectPath,
         sessionId: null,
-        model: model || 'gpt-5.2',
+        model: model || CODEX_MODELS.DEFAULT,
         permissionMode: 'bypassPermissions'
       }, writer);
     }
diff --git a/server/routes/commands.js b/server/routes/commands.js
index 18a5c93..b13a8f3 100644
--- a/server/routes/commands.js
+++ b/server/routes/commands.js
@@ -4,6 +4,7 @@ import path from 'path';
 import { fileURLToPath } from 'url';
 import os from 'os';
 import matter from 'gray-matter';
+import { CLAUDE_MODELS, CURSOR_MODELS, CODEX_MODELS } from '../../shared/modelConstants.js';
 
 const __filename = fileURLToPath(import.meta.url);
 const __dirname = path.dirname(__filename);
@@ -182,23 +183,15 @@ Custom commands can be created in:
   },
 
   '/model': async (args, context) => {
-    // Read available models from config or defaults
+    // Read available models from centralized constants
     const availableModels = {
-      claude: [
-        'claude-sonnet-4.5',
-        'claude-sonnet-4',
-        'claude-opus-4',
-        'claude-sonnet-3.5'
-      ],
-      cursor: [
-        'gpt-5',
-        'sonnet-4',
-        'opus-4.1'
-      ]
+      claude: CLAUDE_MODELS.OPTIONS.map(o => o.value),
+      cursor: CURSOR_MODELS.OPTIONS.map(o => o.value),
+      codex: CODEX_MODELS.OPTIONS.map(o => o.value)
     };
 
     const currentProvider = context?.provider || 'claude';
-    const currentModel = context?.model || 'claude-sonnet-4.5';
+    const currentModel = context?.model || CLAUDE_MODELS.DEFAULT;
 
     return {
       type: 'builtin',
@@ -216,50 +209,6 @@
     };
   },
 
-  '/cost': async (args, context) => {
-    // Calculate token usage and cost
-    const sessionId = context?.sessionId;
-    const tokenUsage = context?.tokenUsage || { used: 0, total: 200000 };
-
-    const costPerMillion = {
-      'claude-sonnet-4.5': { input: 3, output: 15 },
-      'claude-sonnet-4': { input: 3, output: 15 },
-      'claude-opus-4': { input: 15, output: 75 },
-      'gpt-5': { input: 5, output: 15 }
-    };
-
-    const model = context?.model || 'claude-sonnet-4.5';
-    const rates = costPerMillion[model] || costPerMillion['claude-sonnet-4.5'];
-
-    // Estimate 70% input, 30% output
-    const estimatedInputTokens = Math.floor(tokenUsage.used * 0.7);
-    const estimatedOutputTokens = Math.floor(tokenUsage.used * 0.3);
-
-    const inputCost = (estimatedInputTokens / 1000000) * rates.input;
-    const outputCost = (estimatedOutputTokens / 1000000) * rates.output;
-    const totalCost = inputCost + outputCost;
-
-    return {
-      type: 'builtin',
-      action: 'cost',
-      data: {
-        tokenUsage: {
-          used: tokenUsage.used,
-          total: tokenUsage.total,
-          percentage: ((tokenUsage.used / tokenUsage.total) * 100).toFixed(1)
-        },
-        cost: {
-          input: inputCost.toFixed(4),
-          output: outputCost.toFixed(4),
-          total: totalCost.toFixed(4),
-          currency: 'USD'
-        },
-        model,
-        rates
-      }
-    };
-  },
-
   '/status': async (args, context) => {
     // Read version from package.json
     const packageJsonPath = path.join(path.dirname(__dirname), '..', 'package.json');
diff --git a/server/routes/cursor.js b/server/routes/cursor.js
index 5f7e873..4471ab7 100644
--- a/server/routes/cursor.js
+++ b/server/routes/cursor.js
@@ -6,6 +6,7 @@ import { spawn } from 'child_process';
 import sqlite3 from 'sqlite3';
 import { open } from 'sqlite';
 import crypto from 'crypto';
+import { CURSOR_MODELS } from '../../shared/modelConstants.js';
 
 const router = express.Router();
@@ -33,7 +34,7 @@ router.get('/config', async (req, res) => {
       config: {
         version: 1,
         model: {
-          modelId: "gpt-5",
+          modelId: CURSOR_MODELS.DEFAULT,
           displayName: "GPT-5"
         },
         permissions: {
diff --git a/shared/modelConstants.js b/shared/modelConstants.js
new file mode 100644
index 0000000..7d4347f
--- /dev/null
+++ b/shared/modelConstants.js
@@ -0,0 +1,65 @@
+/**
+ * Centralized Model Definitions
+ * Single source of truth for all supported AI models
+ */
+
+/**
+ * Claude (Anthropic) Models
+ *
+ * Note: Claude uses two different formats:
+ * - SDK format ('sonnet', 'opus') - used by the UI and claude-sdk.js
+ * - API format ('claude-sonnet-4.5') - used by slash commands for display
+ */
+export const CLAUDE_MODELS = {
+  // Models in SDK format (what the actual SDK accepts)
+  OPTIONS: [
+    { value: 'sonnet', label: 'Sonnet' },
+    { value: 'opus', label: 'Opus' },
+    { value: 'haiku', label: 'Haiku' },
+    { value: 'opusplan', label: 'Opus Plan' },
+    { value: 'sonnet[1m]', label: 'Sonnet [1M]' }
+  ],
+
+  DEFAULT: 'sonnet'
+};
+
+/**
+ * Cursor Models
+ */
+export const CURSOR_MODELS = {
+  OPTIONS: [
+    { value: 'gpt-5.2-high', label: 'GPT-5.2 High' },
+    { value: 'gemini-3-pro', label: 'Gemini 3 Pro' },
+    { value: 'opus-4.5-thinking', label: 'Claude 4.5 Opus (Thinking)' },
+    { value: 'gpt-5.2', label: 'GPT-5.2' },
+    { value: 'gpt-5.1', label: 'GPT-5.1' },
+    { value: 'gpt-5.1-high', label: 'GPT-5.1 High' },
+    { value: 'composer-1', label: 'Composer 1' },
+    { value: 'auto', label: 'Auto' },
+    { value: 'sonnet-4.5', label: 'Claude 4.5 Sonnet' },
+    { value: 'sonnet-4.5-thinking', label: 'Claude 4.5 Sonnet (Thinking)' },
+    { value: 'opus-4.5', label: 'Claude 4.5 Opus' },
+    { value: 'gpt-5.1-codex', label: 'GPT-5.1 Codex' },
+    { value: 'gpt-5.1-codex-high', label: 'GPT-5.1 Codex High' },
+    { value: 'gpt-5.1-codex-max', label: 'GPT-5.1 Codex Max' },
+    { value: 'gpt-5.1-codex-max-high', label: 'GPT-5.1 Codex Max High' },
+    { value: 'opus-4.1', label: 'Claude 4.1 Opus' },
+    { value: 'grok', label: 'Grok' }
+  ],
+
+  DEFAULT: 'gpt-5'
+};
+
+/**
+ * Codex (OpenAI) Models
+ */
+export const CODEX_MODELS = {
+  OPTIONS: [
+    { value: 'gpt-5.2', label: 'GPT-5.2' },
+    { value: 'gpt-5.1-codex-max', label: 'GPT-5.1 Codex Max' },
+    { value: 'o3', label: 'O3' },
+    { value: 'o4-mini', label: 'O4-mini' }
+  ],
+
+  DEFAULT: 'gpt-5.2'
+};
diff --git a/src/components/ChatInterface.jsx b/src/components/ChatInterface.jsx
index ed87071..337513c 100644
--- a/src/components/ChatInterface.jsx
+++ b/src/components/ChatInterface.jsx
@@ -35,6 +35,7 @@ import { MicButton } from './MicButton.jsx';
 import { api, authenticatedFetch } from '../utils/api';
 import Fuse from 'fuse.js';
 import CommandMenu from './CommandMenu';
+import { CLAUDE_MODELS, CURSOR_MODELS, CODEX_MODELS } from '../../shared/modelConstants';
 
 // Helper function to decode HTML entities in text
@@ -1723,13 +1724,13 @@ function ChatInterface({ selectedProject, selectedSession, ws, sendMessage, mess
     return localStorage.getItem('selected-provider') || 'claude';
   });
   const [cursorModel, setCursorModel] = useState(() => {
-    return localStorage.getItem('cursor-model') || 'gpt-5';
+    return localStorage.getItem('cursor-model') || CURSOR_MODELS.DEFAULT;
   });
   const [claudeModel, setClaudeModel] = useState(() => {
-    return localStorage.getItem('claude-model') || 'sonnet';
+    return localStorage.getItem('claude-model') || CLAUDE_MODELS.DEFAULT;
   });
   const [codexModel, setCodexModel] = useState(() => {
-    return localStorage.getItem('codex-model') || 'gpt-5.2';
+    return localStorage.getItem('codex-model') || CODEX_MODELS.DEFAULT;
   });
 
   // Load permission mode for the current session
   useEffect(() => {
@@ -1758,17 +1759,10 @@ function ChatInterface({ selectedProject, selectedSession, ws, sendMessage, mess
       .then(res => res.json())
       .then(data => {
         if (data.success && data.config?.model?.modelId) {
-          // Map Cursor model IDs to our simplified names
-          const modelMap = {
-            'gpt-5': 'gpt-5',
-            'claude-4-sonnet': 'sonnet-4',
-            'sonnet-4': 'sonnet-4',
-            'claude-4-opus': 'opus-4.1',
-            'opus-4.1': 'opus-4.1'
-          };
-          const mappedModel = modelMap[data.config.model.modelId] || data.config.model.modelId;
+          // Use the model from config directly
+          const modelId = data.config.model.modelId;
           if (!localStorage.getItem('cursor-model')) {
-            setCursorModel(mappedModel);
+            setCursorModel(modelId);
           }
         }
       })
@@ -4547,11 +4541,9 @@ function ChatInterface({ selectedProject, selectedSession, ws, sendMessage, mess
                   }}
                   className="pl-4 pr-10 py-2 text-sm bg-white dark:bg-gray-800 border border-gray-300 dark:border-gray-600 rounded-lg focus:ring-2 focus:ring-purple-500 focus:border-purple-500 min-w-[140px]"
                 >
-                  [five hard-coded Claude option elements; markup lost in extraction]
+                  {CLAUDE_MODELS.OPTIONS.map(({ value, label }) => (
+                    [option markup lost in extraction]
+                  ))}
                 
               ) : provider === 'codex' ? (
               ) : (
               )}
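
Illustrative request (editor's sketch, not part of the patch): with this change a caller can pin
the model per request instead of always getting the provider default. The endpoint, provider,
stream, and model fields come from the patched api-docs.html and shared/modelConstants.js; the
"command" field name is an assumption, since the real request schema is not shown in this diff,
and authentication is omitted.

    // call-agent.mjs - illustrative sketch only
    const res = await fetch('http://localhost:3001/api/agent', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },  // API-key header required by the route is omitted here
      body: JSON.stringify({
        provider: 'codex',
        model: 'gpt-5.1-codex-max',  // any value from CODEX_MODELS.OPTIONS
        stream: false,
        command: 'List the TODO comments in this project'  // field name assumed
      })
    });
    console.log(await res.json());

If model is omitted, the patched code falls back to CODEX_MODELS.DEFAULT ('gpt-5.2') for Codex and
to CLAUDE_MODELS.DEFAULT ('sonnet') inside claude-sdk.js for Claude.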
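
Minimal consumption sketch for the new shared constants (illustrative; it mirrors the lookups the
patch adds in server/routes/commands.js, and the standalone-script framing is an assumption):

    // list-models.mjs - every provider constant exposes OPTIONS ({ value, label }) plus a DEFAULT value
    import { CLAUDE_MODELS, CURSOR_MODELS, CODEX_MODELS } from './shared/modelConstants.js';

    const availableModels = {
      claude: CLAUDE_MODELS.OPTIONS.map(o => o.value),  // ['sonnet', 'opus', 'haiku', 'opusplan', 'sonnet[1m]']
      cursor: CURSOR_MODELS.OPTIONS.map(o => o.value),
      codex: CODEX_MODELS.OPTIONS.map(o => o.value)     // ['gpt-5.2', 'gpt-5.1-codex-max', 'o3', 'o4-mini']
    };

    console.log(availableModels);
    console.log(CLAUDE_MODELS.DEFAULT, CURSOR_MODELS.DEFAULT, CODEX_MODELS.DEFAULT); // sonnet gpt-5 gpt-5.2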