Files
claudecodeui/server/database/db.js
Haile 96463df8da Feature/backend TS support and unification of auth settings on frontend (#654)
* fix: remove project dependency from settings controller and onboarding

* fix(settings): remove onClose prop from useSettingsController args

* chore: tailwind classes order

* refactor: move provider auth status management to custom hook

* refactor: rename SessionProvider to LLMProvider

* feat(frontend): support for @ alias based imports

* fix: replace init.sql with schema.js

* fix: refactor database initialization to use schema.js for SQL statements

* feat(server): add a real backend TypeScript build and enforce module boundaries

The backend had started to grow beyond what the frontend-only tooling setup could
support safely. We were still running server code directly from /server, linting
mainly the client, and relying on path assumptions such as "../.." that only
worked in the source layout. That created three problems:

- backend alias imports were hard to resolve consistently in the editor, in
  ESLint, and at runtime
- server code had no enforced module boundary rules, so cross-module deep imports
  could bypass intended public entry points
- building the backend into a separate output directory would break repo-level
  lookups for package.json, .env, dist, and public assets because those paths
  were derived from source-only relative assumptions

This change makes the backend tooling explicit and runtime-safe.

A dedicated backend TypeScript config now lives in server/tsconfig.json, with
tsconfig.server.json reduced to a compatibility shim. This gives the language
service and backend tooling a canonical project rooted in /server while still
preserving top-level compatibility for any existing references. The backend alias
mapping now resolves relative to /server, which avoids colliding with the
frontend's "@/..." -> "src/*" mapping.

The package scripts were updated so development runs through tsx with the backend
tsconfig, build now produces a compiled backend in dist-server, and typecheck/lint
cover both client and server. A new build-server.mjs script cleans dist-server
first and then runs tsc and tsc-alias, which prevents stale compiled files from
shadowing current source files after refactors.
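
The resulting scripts, sketched with hypothetical names (the real script
names and entry point may differ):

    "scripts": {
      "dev:server": "tsx watch --tsconfig server/tsconfig.json server/index.ts",
      "build:server": "node build-server.mjs",
      "typecheck": "tsc -p tsconfig.json --noEmit && tsc -p server/tsconfig.json --noEmit"
    }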

To make the compiled backend behave the same as the source backend, runtime path
resolution was centralized in server/utils/runtime-paths.js. Instead of assuming
fixed relative paths from each module, server entry points now resolve the actual
app root and server root at runtime. That keeps package.json, .env, dist, public,
and default database paths stable whether code is executed from /server or from
/dist-server/server.
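
A minimal sketch of that idea (the real server/utils/runtime-paths.js may
differ in detail):

    import path from 'path';
    import fs from 'fs';
    import { fileURLToPath } from 'url';

    // Directory of the calling module; identical logic whether the module
    // runs from /server or from /dist-server/server.
    export function getModuleDir(moduleUrl) {
      return path.dirname(fileURLToPath(moduleUrl));
    }

    // Walk upward until a package.json marks the app root.
    export function findAppRoot(startDir) {
      let dir = startDir;
      while (true) {
        if (fs.existsSync(path.join(dir, 'package.json'))) return dir;
        const parent = path.dirname(dir);
        if (parent === dir) return startDir; // hit the filesystem root; fall back
        dir = parent;
      }
    }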

ESLint was expanded from a frontend-only setup into a backend-aware one. The
backend now uses import resolution tied to the backend tsconfig so aliased imports
resolve correctly in linting, import ordering matches the frontend style, and
unused/duplicate imports are surfaced consistently.
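
The backend ESLint override might carry settings along these lines (a sketch;
whether the repo uses flat or legacy config, and the exact plugin wiring,
may differ):

    {
      files: ['server/**/*.{ts,js}'],
      settings: {
        'import/resolver': {
          typescript: { project: 'server/tsconfig.json' }
        }
      }
    }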

Most importantly, eslint-plugin-boundaries now enforces server module boundaries.
Files under server/modules can no longer import another module's internals
directly. Cross-module imports must go through that module's barrel file
(index.ts/index.js). boundaries/no-unknown was also enabled so alias-resolution
gaps cannot silently bypass the rule.
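
A sketch of how that rule set could be wired (the element names and globs
here are assumptions, not the repo's actual config):

    settings: {
      'boundaries/elements': [
        { type: 'module', pattern: 'server/modules/*', capture: ['moduleName'] }
      ]
    },
    rules: {
      // cross-module imports must enter through the module's barrel file
      'boundaries/entry-point': ['error', {
        default: 'disallow',
        rules: [{ target: ['module'], allow: ['index.ts', 'index.js'] }]
      }],
      // fail on files the element map cannot classify
      'boundaries/no-unknown': 'error'
    }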

Together, these changes make the backend buildable, keep runtime path resolution
stable after compilation, align server tooling with the client where appropriate,
and enforce a stricter modular architecture for server code.

* fix: update package.json to include dist-server in files and remove tsconfig.server.json

* refactor: remove build-server.mjs and inline its logic into package.json scripts

* fix: update paths in package.json and bin.js to use dist-server directory

* feat(eslint): add backend shared types and enforce compile-time contract for imports

* fix(eslint): update shared types pattern

---------

Co-authored-by: Haileyesus <something@gmail.com>
2026-04-15 13:26:12 +02:00

594 lines
18 KiB
JavaScript

import Database from 'better-sqlite3';
import path from 'path';
import fs from 'fs';
import crypto from 'crypto';
import { findAppRoot, getModuleDir } from '../utils/runtime-paths.js';
import {
  APP_CONFIG_TABLE_SQL,
  USER_NOTIFICATION_PREFERENCES_TABLE_SQL,
  VAPID_KEYS_TABLE_SQL,
  PUSH_SUBSCRIPTIONS_TABLE_SQL,
  SESSION_NAMES_TABLE_SQL,
  SESSION_NAMES_LOOKUP_INDEX_SQL,
  DATABASE_SCHEMA_SQL
} from './schema.js';
const __dirname = getModuleDir(import.meta.url);
// The compiled backend lives under dist-server/server/database, but the install root we log
// should still point at the project/app root. Resolving it here avoids build-layout drift.
const APP_ROOT = findAppRoot(__dirname);
// ANSI color codes for terminal output
const colors = {
  reset: '\x1b[0m',
  bright: '\x1b[1m',
  cyan: '\x1b[36m',
  dim: '\x1b[2m',
};
const c = {
  info: (text) => `${colors.cyan}${text}${colors.reset}`,
  bright: (text) => `${colors.bright}${text}${colors.reset}`,
  dim: (text) => `${colors.dim}${text}${colors.reset}`,
};
// Use DATABASE_PATH environment variable if set, otherwise use default location
const DB_PATH = process.env.DATABASE_PATH || path.join(__dirname, 'auth.db');
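// e.g. DATABASE_PATH=/var/lib/claudecodeui/auth.db (hypothetical path; any writable location works)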
// Ensure database directory exists if custom path is provided
if (process.env.DATABASE_PATH) {
  const dbDir = path.dirname(DB_PATH);
  try {
    if (!fs.existsSync(dbDir)) {
      fs.mkdirSync(dbDir, { recursive: true });
      console.log(`Created database directory: ${dbDir}`);
    }
  } catch (error) {
    console.error(`Failed to create database directory ${dbDir}:`, error.message);
    throw error;
  }
}
// As part of 1.19.2, auth.db moved to a new default location. The block below migrates an existing legacy database from the install directory to the new location.
const LEGACY_DB_PATH = path.join(__dirname, 'auth.db');
if (DB_PATH !== LEGACY_DB_PATH && !fs.existsSync(DB_PATH) && fs.existsSync(LEGACY_DB_PATH)) {
  try {
    fs.copyFileSync(LEGACY_DB_PATH, DB_PATH);
    console.log(`[MIGRATION] Copied database from ${LEGACY_DB_PATH} to ${DB_PATH}`);
    for (const suffix of ['-wal', '-shm']) {
      if (fs.existsSync(LEGACY_DB_PATH + suffix)) {
        fs.copyFileSync(LEGACY_DB_PATH + suffix, DB_PATH + suffix);
      }
    }
  } catch (err) {
    console.warn(`[MIGRATION] Could not copy legacy database: ${err.message}`);
  }
}
// Create database connection
const db = new Database(DB_PATH);
// app_config must exist before any other module imports (auth.js reads the JWT secret at load time).
// runMigrations() also creates this table, but it runs too late for existing installations
// where auth.js is imported before initializeDatabase() is called.
db.exec(APP_CONFIG_TABLE_SQL);
// Show app installation path prominently
const appInstallPath = APP_ROOT;
console.log('');
console.log(c.dim('═'.repeat(60)));
console.log(`${c.info('[INFO]')} App Installation: ${c.bright(appInstallPath)}`);
console.log(`${c.info('[INFO]')} Database: ${c.dim(path.relative(appInstallPath, DB_PATH))}`);
if (process.env.DATABASE_PATH) {
  console.log(` ${c.dim('(Using custom DATABASE_PATH from environment)')}`);
}
console.log(c.dim('═'.repeat(60)));
console.log('');
const runMigrations = () => {
  try {
    const tableInfo = db.prepare("PRAGMA table_info(users)").all();
    const columnNames = tableInfo.map(col => col.name);
    if (!columnNames.includes('git_name')) {
      console.log('Running migration: Adding git_name column');
      db.exec('ALTER TABLE users ADD COLUMN git_name TEXT');
    }
    if (!columnNames.includes('git_email')) {
      console.log('Running migration: Adding git_email column');
      db.exec('ALTER TABLE users ADD COLUMN git_email TEXT');
    }
    if (!columnNames.includes('has_completed_onboarding')) {
      console.log('Running migration: Adding has_completed_onboarding column');
      db.exec('ALTER TABLE users ADD COLUMN has_completed_onboarding BOOLEAN DEFAULT 0');
    }
    db.exec(USER_NOTIFICATION_PREFERENCES_TABLE_SQL);
    db.exec(VAPID_KEYS_TABLE_SQL);
    db.exec(PUSH_SUBSCRIPTIONS_TABLE_SQL);
    db.exec(APP_CONFIG_TABLE_SQL);
    db.exec(SESSION_NAMES_TABLE_SQL);
    db.exec(SESSION_NAMES_LOOKUP_INDEX_SQL);
    console.log('Database migrations completed successfully');
  } catch (error) {
    console.error('Error running migrations:', error.message);
    throw error;
  }
};
// Initialize database with schema
const initializeDatabase = async () => {
  try {
    db.exec(DATABASE_SCHEMA_SQL);
    console.log('Database initialized successfully');
    runMigrations();
  } catch (error) {
    console.error('Error initializing database:', error.message);
    throw error;
  }
};
// User database operations
const userDb = {
  // Check if any users exist
  hasUsers: () => {
    try {
      const row = db.prepare('SELECT COUNT(*) as count FROM users').get();
      return row.count > 0;
    } catch (err) {
      throw err;
    }
  },
  // Create a new user
  createUser: (username, passwordHash) => {
    try {
      const stmt = db.prepare('INSERT INTO users (username, password_hash) VALUES (?, ?)');
      const result = stmt.run(username, passwordHash);
      return { id: result.lastInsertRowid, username };
    } catch (err) {
      throw err;
    }
  },
  // Get user by username
  getUserByUsername: (username) => {
    try {
      const row = db.prepare('SELECT * FROM users WHERE username = ? AND is_active = 1').get(username);
      return row;
    } catch (err) {
      throw err;
    }
  },
  // Update last login time (non-fatal — logged but not thrown)
  updateLastLogin: (userId) => {
    try {
      db.prepare('UPDATE users SET last_login = CURRENT_TIMESTAMP WHERE id = ?').run(userId);
    } catch (err) {
      console.warn('Failed to update last login:', err.message);
    }
  },
  // Get user by ID
  getUserById: (userId) => {
    try {
      const row = db.prepare('SELECT id, username, created_at, last_login FROM users WHERE id = ? AND is_active = 1').get(userId);
      return row;
    } catch (err) {
      throw err;
    }
  },
  // Get the first active user
  getFirstUser: () => {
    try {
      const row = db.prepare('SELECT id, username, created_at, last_login FROM users WHERE is_active = 1 LIMIT 1').get();
      return row;
    } catch (err) {
      throw err;
    }
  },
  // Update the user's git identity (name and email)
  updateGitConfig: (userId, gitName, gitEmail) => {
    try {
      const stmt = db.prepare('UPDATE users SET git_name = ?, git_email = ? WHERE id = ?');
      stmt.run(gitName, gitEmail, userId);
    } catch (err) {
      throw err;
    }
  },
  // Get the user's git identity
  getGitConfig: (userId) => {
    try {
      const row = db.prepare('SELECT git_name, git_email FROM users WHERE id = ?').get(userId);
      return row;
    } catch (err) {
      throw err;
    }
  },
  // Mark onboarding as completed for the user
  completeOnboarding: (userId) => {
    try {
      const stmt = db.prepare('UPDATE users SET has_completed_onboarding = 1 WHERE id = ?');
      stmt.run(userId);
    } catch (err) {
      throw err;
    }
  },
  // Check whether the user has completed onboarding
  hasCompletedOnboarding: (userId) => {
    try {
      const row = db.prepare('SELECT has_completed_onboarding FROM users WHERE id = ?').get(userId);
      return row?.has_completed_onboarding === 1;
    } catch (err) {
      throw err;
    }
  }
};
// API Keys database operations
const apiKeysDb = {
  // Generate a new API key
  generateApiKey: () => {
    return 'ck_' + crypto.randomBytes(32).toString('hex');
  },
  // Create a new API key
  createApiKey: (userId, keyName) => {
    try {
      const apiKey = apiKeysDb.generateApiKey();
      const stmt = db.prepare('INSERT INTO api_keys (user_id, key_name, api_key) VALUES (?, ?, ?)');
      const result = stmt.run(userId, keyName, apiKey);
      return { id: result.lastInsertRowid, keyName, apiKey };
    } catch (err) {
      throw err;
    }
  },
  // Get all API keys for a user
  getApiKeys: (userId) => {
    try {
      const rows = db.prepare('SELECT id, key_name, api_key, created_at, last_used, is_active FROM api_keys WHERE user_id = ? ORDER BY created_at DESC').all(userId);
      return rows;
    } catch (err) {
      throw err;
    }
  },
  // Validate API key and get user
  validateApiKey: (apiKey) => {
    try {
      const row = db.prepare(`
        SELECT u.id, u.username, ak.id as api_key_id
        FROM api_keys ak
        JOIN users u ON ak.user_id = u.id
        WHERE ak.api_key = ? AND ak.is_active = 1 AND u.is_active = 1
      `).get(apiKey);
      if (row) {
        // Update last_used timestamp
        db.prepare('UPDATE api_keys SET last_used = CURRENT_TIMESTAMP WHERE id = ?').run(row.api_key_id);
      }
      return row;
    } catch (err) {
      throw err;
    }
  },
  // Delete an API key
  deleteApiKey: (userId, apiKeyId) => {
    try {
      const stmt = db.prepare('DELETE FROM api_keys WHERE id = ? AND user_id = ?');
      const result = stmt.run(apiKeyId, userId);
      return result.changes > 0;
    } catch (err) {
      throw err;
    }
  },
  // Toggle API key active status
  toggleApiKey: (userId, apiKeyId, isActive) => {
    try {
      const stmt = db.prepare('UPDATE api_keys SET is_active = ? WHERE id = ? AND user_id = ?');
      const result = stmt.run(isActive ? 1 : 0, apiKeyId, userId);
      return result.changes > 0;
    } catch (err) {
      throw err;
    }
  }
};
// User credentials database operations (for GitHub tokens, GitLab tokens, etc.)
const credentialsDb = {
  // Create a new credential
  createCredential: (userId, credentialName, credentialType, credentialValue, description = null) => {
    try {
      const stmt = db.prepare('INSERT INTO user_credentials (user_id, credential_name, credential_type, credential_value, description) VALUES (?, ?, ?, ?, ?)');
      const result = stmt.run(userId, credentialName, credentialType, credentialValue, description);
      return { id: result.lastInsertRowid, credentialName, credentialType };
    } catch (err) {
      throw err;
    }
  },
  // Get all credentials for a user, optionally filtered by type
  getCredentials: (userId, credentialType = null) => {
    try {
      let query = 'SELECT id, credential_name, credential_type, description, created_at, is_active FROM user_credentials WHERE user_id = ?';
      const params = [userId];
      if (credentialType) {
        query += ' AND credential_type = ?';
        params.push(credentialType);
      }
      query += ' ORDER BY created_at DESC';
      const rows = db.prepare(query).all(...params);
      return rows;
    } catch (err) {
      throw err;
    }
  },
  // Get active credential value for a user by type (returns most recent active)
  getActiveCredential: (userId, credentialType) => {
    try {
      const row = db.prepare('SELECT credential_value FROM user_credentials WHERE user_id = ? AND credential_type = ? AND is_active = 1 ORDER BY created_at DESC LIMIT 1').get(userId, credentialType);
      return row?.credential_value || null;
    } catch (err) {
      throw err;
    }
  },
  // Delete a credential
  deleteCredential: (userId, credentialId) => {
    try {
      const stmt = db.prepare('DELETE FROM user_credentials WHERE id = ? AND user_id = ?');
      const result = stmt.run(credentialId, userId);
      return result.changes > 0;
    } catch (err) {
      throw err;
    }
  },
  // Toggle credential active status
  toggleCredential: (userId, credentialId, isActive) => {
    try {
      const stmt = db.prepare('UPDATE user_credentials SET is_active = ? WHERE id = ? AND user_id = ?');
      const result = stmt.run(isActive ? 1 : 0, credentialId, userId);
      return result.changes > 0;
    } catch (err) {
      throw err;
    }
  }
};
const DEFAULT_NOTIFICATION_PREFERENCES = {
  channels: {
    inApp: false,
    webPush: false
  },
  events: {
    actionRequired: true,
    stop: true,
    error: true
  }
};
// Coerce arbitrary stored/user input into a well-formed preferences object:
// channels are opt-in (default off), events are opt-out (default on).
const normalizeNotificationPreferences = (value) => {
  const source = value && typeof value === 'object' ? value : {};
  return {
    channels: {
      inApp: source.channels?.inApp === true,
      webPush: source.channels?.webPush === true
    },
    events: {
      actionRequired: source.events?.actionRequired !== false,
      stop: source.events?.stop !== false,
      error: source.events?.error !== false
    }
  };
};
const notificationPreferencesDb = {
  // Read a user's preferences, seeding the row with defaults on first access
  getPreferences: (userId) => {
    try {
      const row = db.prepare('SELECT preferences_json FROM user_notification_preferences WHERE user_id = ?').get(userId);
      if (!row) {
        const defaults = normalizeNotificationPreferences(DEFAULT_NOTIFICATION_PREFERENCES);
        db.prepare(
          'INSERT INTO user_notification_preferences (user_id, preferences_json, updated_at) VALUES (?, ?, CURRENT_TIMESTAMP)'
        ).run(userId, JSON.stringify(defaults));
        return defaults;
      }
      let parsed;
      try {
        parsed = JSON.parse(row.preferences_json);
      } catch {
        parsed = DEFAULT_NOTIFICATION_PREFERENCES;
      }
      return normalizeNotificationPreferences(parsed);
    } catch (err) {
      throw err;
    }
  },
  // Normalize and upsert a user's preferences
  updatePreferences: (userId, preferences) => {
    try {
      const normalized = normalizeNotificationPreferences(preferences);
      db.prepare(
        `INSERT INTO user_notification_preferences (user_id, preferences_json, updated_at)
         VALUES (?, ?, CURRENT_TIMESTAMP)
         ON CONFLICT(user_id) DO UPDATE SET
           preferences_json = excluded.preferences_json,
           updated_at = CURRENT_TIMESTAMP`
      ).run(userId, JSON.stringify(normalized));
      return normalized;
    } catch (err) {
      throw err;
    }
  }
};
const pushSubscriptionsDb = {
  // Upsert a web-push subscription, keyed by endpoint
  saveSubscription: (userId, endpoint, keysP256dh, keysAuth) => {
    try {
      db.prepare(
        `INSERT INTO push_subscriptions (user_id, endpoint, keys_p256dh, keys_auth)
         VALUES (?, ?, ?, ?)
         ON CONFLICT(endpoint) DO UPDATE SET
           user_id = excluded.user_id,
           keys_p256dh = excluded.keys_p256dh,
           keys_auth = excluded.keys_auth`
      ).run(userId, endpoint, keysP256dh, keysAuth);
    } catch (err) {
      throw err;
    }
  },
  // Get all subscriptions for a user
  getSubscriptions: (userId) => {
    try {
      return db.prepare('SELECT endpoint, keys_p256dh, keys_auth FROM push_subscriptions WHERE user_id = ?').all(userId);
    } catch (err) {
      throw err;
    }
  },
  // Remove a single subscription by endpoint
  removeSubscription: (endpoint) => {
    try {
      db.prepare('DELETE FROM push_subscriptions WHERE endpoint = ?').run(endpoint);
    } catch (err) {
      throw err;
    }
  },
  // Remove all subscriptions for a user
  removeAllForUser: (userId) => {
    try {
      db.prepare('DELETE FROM push_subscriptions WHERE user_id = ?').run(userId);
    } catch (err) {
      throw err;
    }
  }
};
// Session custom names database operations
const sessionNamesDb = {
  // Set (insert or update) a custom session name
  setName: (sessionId, provider, customName) => {
    db.prepare(`
      INSERT INTO session_names (session_id, provider, custom_name)
      VALUES (?, ?, ?)
      ON CONFLICT(session_id, provider)
      DO UPDATE SET custom_name = excluded.custom_name, updated_at = CURRENT_TIMESTAMP
    `).run(sessionId, provider, customName);
  },
  // Get a single custom session name
  getName: (sessionId, provider) => {
    const row = db.prepare(
      'SELECT custom_name FROM session_names WHERE session_id = ? AND provider = ?'
    ).get(sessionId, provider);
    return row?.custom_name || null;
  },
  // Batch lookup — returns Map<sessionId, customName>
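  // e.g. getNames(['abc123', 'def456'], 'claude') -> Map { 'abc123' => 'Fix login flow' } (illustrative values)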
  getNames: (sessionIds, provider) => {
    if (!sessionIds.length) return new Map();
    const placeholders = sessionIds.map(() => '?').join(',');
    const rows = db.prepare(
      `SELECT session_id, custom_name FROM session_names
       WHERE session_id IN (${placeholders}) AND provider = ?`
    ).all(...sessionIds, provider);
    return new Map(rows.map(r => [r.session_id, r.custom_name]));
  },
  // Delete a custom session name
  deleteName: (sessionId, provider) => {
    return db.prepare(
      'DELETE FROM session_names WHERE session_id = ? AND provider = ?'
    ).run(sessionId, provider).changes > 0;
  },
};
// Apply custom session names from the database (overrides CLI-generated summaries)
function applyCustomSessionNames(sessions, provider) {
  if (!sessions?.length) return;
  try {
    const ids = sessions.map(s => s.id);
    const customNames = sessionNamesDb.getNames(ids, provider);
    for (const session of sessions) {
      const custom = customNames.get(session.id);
      if (custom) session.summary = custom;
    }
  } catch (error) {
    console.warn(`[DB] Failed to apply custom session names for ${provider}:`, error.message);
  }
}
// App config database operations
const appConfigDb = {
  // Read a config value; returns null when the key is missing or the read fails
  get: (key) => {
    try {
      const row = db.prepare('SELECT value FROM app_config WHERE key = ?').get(key);
      return row?.value || null;
    } catch (err) {
      return null;
    }
  },
  // Upsert a config value
  set: (key, value) => {
    db.prepare(
      'INSERT INTO app_config (key, value) VALUES (?, ?) ON CONFLICT(key) DO UPDATE SET value = excluded.value'
    ).run(key, value);
  },
  // Lazily create and persist the JWT signing secret on first use
  getOrCreateJwtSecret: () => {
    let secret = appConfigDb.get('jwt_secret');
    if (!secret) {
      secret = crypto.randomBytes(64).toString('hex');
      appConfigDb.set('jwt_secret', secret);
    }
    return secret;
  }
};
// Backward compatibility - keep old names pointing to new system
const githubTokensDb = {
  createGithubToken: (userId, tokenName, githubToken, description = null) => {
    return credentialsDb.createCredential(userId, tokenName, 'github_token', githubToken, description);
  },
  getGithubTokens: (userId) => {
    return credentialsDb.getCredentials(userId, 'github_token');
  },
  getActiveGithubToken: (userId) => {
    return credentialsDb.getActiveCredential(userId, 'github_token');
  },
  deleteGithubToken: (userId, tokenId) => {
    return credentialsDb.deleteCredential(userId, tokenId);
  },
  toggleGithubToken: (userId, tokenId, isActive) => {
    return credentialsDb.toggleCredential(userId, tokenId, isActive);
  }
};
export {
  db,
  initializeDatabase,
  userDb,
  apiKeysDb,
  credentialsDb,
  notificationPreferencesDb,
  pushSubscriptionsDb,
  sessionNamesDb,
  applyCustomSessionNames,
  appConfigDb,
  githubTokensDb // Backward compatibility
};