mirror of
https://github.com/siteboon/claudecodeui.git
synced 2026-04-30 09:21:33 +00:00
refactor(database): move db into typescript
- Implemented githubTokensDb for managing GitHub tokens with CRUD operations. - Created notificationPreferencesDb to handle user notification preferences. - Added projectsDb for project path management and related operations. - Introduced pushSubscriptionsDb for managing browser push subscriptions. - Developed scanStateDb to track the last scanned timestamp. - Established sessionsDb for session management with CRUD functionalities. - Created userDb for user management, including authentication and onboarding. - Implemented vapidKeysDb for storing and managing VAPID keys. feat(database): define schema for new database tables - Added SQL schema definitions for users, API keys, user credentials, notification preferences, VAPID keys, push subscriptions, projects, sessions, scan state, and app configuration. - Included necessary indexes for performance optimization. refactor(shared): enhance type definitions and utility functions - Updated shared types and interfaces for improved clarity and consistency. - Added new types for credential management and provider-specific operations. - Refined utility functions for better error handling and message normalization.
This commit is contained in:
143
server/modules/database/connection.ts
Normal file
143
server/modules/database/connection.ts
Normal file
@@ -0,0 +1,143 @@
|
||||
/**
|
||||
* Database connection management.
|
||||
*
|
||||
* Owns the single SQLite connection used across all repositories.
|
||||
* Handles path resolution, directory creation, legacy database migration,
|
||||
* and eager app_config bootstrap so the auth middleware can read the
|
||||
* JWT secret before the full schema is applied.
|
||||
*
|
||||
* Consumers should never create their own Database instance — they use
|
||||
* `getConnection()` to obtain the shared singleton.
|
||||
*/
|
||||
|
||||
import Database from 'better-sqlite3';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
import { APP_CONFIG_TABLE_SCHEMA_SQL } from '@/modules/database/schema.js';
|
||||
|
||||
// Recreate CommonJS-style __filename/__dirname for this ES module so
// path resolution below can be relative to this file's location.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// ---------------------------------------------------------------------------
// Path resolution
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Resolves the database file path from environment or falls back
|
||||
* to the legacy location inside the server/database/ folder.
|
||||
*
|
||||
* Priority:
|
||||
* 1. DATABASE_PATH environment variable (set by cli.js or load-env-vars.js)
|
||||
* 2. Legacy path: server/database/auth.db
|
||||
*/
|
||||
function resolveDatabasePath(): string {
|
||||
// process.env.DATABASE_PATH is set by load-env-vars.js to either the .env value or a default(~/.cloudcli/auth.db) in the user's home directory.
|
||||
return process.env.DATABASE_PATH || resolveLegacyDatabasePath();
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves the legacy database path (always inside server/database/).
|
||||
* Used for the one-time migration to the new external location.
|
||||
*/
|
||||
function resolveLegacyDatabasePath(): string {
|
||||
const serverDir = path.resolve(__dirname, '..', '..', '..');
|
||||
return path.join(serverDir, 'database', 'auth.db');
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Directory & migration helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function ensureDatabaseDirectory(dbPath: string): void {
|
||||
const dir = path.dirname(dbPath);
|
||||
if (!fs.existsSync(dir)) {
|
||||
fs.mkdirSync(dir, { recursive: true });
|
||||
console.log('Created database directory:', dir);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* If the database was moved to an external location (e.g. ~/.cloudcli/)
|
||||
* but the user still has a legacy auth.db inside the install directory,
|
||||
* copy it to the new location as a one-time migration.
|
||||
*/
|
||||
function migrateLegacyDatabase(targetPath: string): void {
|
||||
const legacyPath = resolveLegacyDatabasePath();
|
||||
|
||||
if (targetPath === legacyPath) return;
|
||||
if (fs.existsSync(targetPath)) return;
|
||||
if (!fs.existsSync(legacyPath)) return;
|
||||
|
||||
try {
|
||||
fs.copyFileSync(legacyPath, targetPath);
|
||||
console.log('Migrated legacy database', { from: legacyPath, to: targetPath });
|
||||
|
||||
|
||||
// copy the write-ahead log and shared memory files (auth.db-wal, auth.db-shm) if they exist, to preserve any uncommitted transactions
|
||||
for (const suffix of ['-wal', '-shm']) {
|
||||
const src = legacyPath + suffix;
|
||||
if (fs.existsSync(src)) {
|
||||
fs.copyFileSync(src, targetPath + suffix);
|
||||
}
|
||||
}
|
||||
} catch (err: any) {
|
||||
console.error('Could not migrate legacy database', { error: err.message });
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// ---------------------------------------------------------------------------
// Singleton connection
// ---------------------------------------------------------------------------

// Shared connection; null until getConnection() runs for the first time.
let instance: Database.Database | null = null;
|
||||
|
||||
/**
|
||||
* Returns the shared database connection, creating it on first call.
|
||||
*
|
||||
* The first invocation:
|
||||
* 1. Resolves the target database path
|
||||
* 2. Ensures the parent directory exists
|
||||
* 3. Migrates from the legacy install-directory path if needed
|
||||
* 4. Opens the SQLite connection
|
||||
* 5. Eagerly creates the app_config table (auth reads JWT secret at import time)
|
||||
* 6. Logs the database location
|
||||
*/
|
||||
export function getConnection(): Database.Database {
|
||||
if (instance) return instance;
|
||||
|
||||
const dbPath = resolveDatabasePath();
|
||||
|
||||
ensureDatabaseDirectory(dbPath);
|
||||
migrateLegacyDatabase(dbPath);
|
||||
|
||||
instance = new Database(dbPath);
|
||||
|
||||
// app_config must exist immediately — the auth middleware reads
|
||||
// the JWT secret at module-load time, before initializeDatabase() runs.
|
||||
instance.exec(APP_CONFIG_TABLE_SCHEMA_SQL);
|
||||
|
||||
return instance;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the resolved database file path without opening a connection.
|
||||
* Useful for diagnostics and CLI status commands.
|
||||
*/
|
||||
export function getDatabasePath(): string {
|
||||
return resolveDatabasePath();
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes the database connection and clears the singleton.
|
||||
* Primarily used for graceful shutdown or testing.
|
||||
*/
|
||||
export function closeConnection(): void {
|
||||
if (instance) {
|
||||
instance.close();
|
||||
instance = null;
|
||||
console.log('Database connection closed');
|
||||
}
|
||||
}
|
||||
17
server/modules/database/init-db.ts
Normal file
17
server/modules/database/init-db.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { getConnection } from "@/modules/database/connection.js";
|
||||
import { runMigrations } from "@/modules/database/migrations.js";
|
||||
import { INIT_SCHEMA_SQL } from "@/modules/database/schema.js";
|
||||
|
||||
// Initialize database with schema
|
||||
export const initializeDatabase = async () => {
|
||||
try {
|
||||
const db = getConnection();
|
||||
db.exec(INIT_SCHEMA_SQL);
|
||||
console.log('Database schema applied');
|
||||
runMigrations(db);
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
console.log('Database initialization failed', { error: message });
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
292
server/modules/database/migrations.ts
Normal file
292
server/modules/database/migrations.ts
Normal file
@@ -0,0 +1,292 @@
|
||||
import { Database } from 'better-sqlite3';
|
||||
|
||||
import {
|
||||
APP_CONFIG_TABLE_SCHEMA_SQL,
|
||||
LAST_SCANNED_AT_SQL,
|
||||
PROJECTS_TABLE_SCHEMA_SQL,
|
||||
PUSH_SUBSCRIPTIONS_TABLE_SCHEMA_SQL,
|
||||
SESSIONS_TABLE_SCHEMA_SQL,
|
||||
USER_NOTIFICATION_PREFERENCES_TABLE_SCHEMA_SQL,
|
||||
VAPID_KEYS_TABLE_SCHEMA_SQL,
|
||||
} from '@/modules/database/schema.js';
|
||||
|
||||
// SQL expression producing a random UUID-shaped string (8-4-4-4-12 hex
// groups) entirely inside SQLite, so ids can be generated in bulk
// UPDATE/INSERT statements. NOTE(review): this is random hex in UUID
// layout, not a strict RFC 4122 v4 UUID (version/variant bits not set).
const SQLITE_UUID_SQL = `
lower(hex(randomblob(4))) || '-' ||
lower(hex(randomblob(2))) || '-' ||
lower(hex(randomblob(2))) || '-' ||
lower(hex(randomblob(2))) || '-' ||
lower(hex(randomblob(6)))
`;

// Subset of the row shape returned by `PRAGMA table_info(...)`.
type TableInfoRow = {
  // Column name.
  name: string;
  // 1-based position of the column within the primary key; 0 when the
  // column is not part of the primary key.
  pk: number;
};
|
||||
|
||||
const addColumnToTableIfNotExists = (
|
||||
db: Database,
|
||||
tableName: string,
|
||||
columnNames: string[],
|
||||
columnName: string,
|
||||
columnType: string
|
||||
) => {
|
||||
if (!columnNames.includes(columnName)) {
|
||||
console.log(`Running migration: Adding ${columnName} column to ${tableName} table`);
|
||||
db.exec(`ALTER TABLE ${tableName} ADD COLUMN ${columnName} ${columnType}`);
|
||||
}
|
||||
};
|
||||
|
||||
const tableExists = (db: Database, tableName: string): boolean =>
|
||||
Boolean(
|
||||
db
|
||||
.prepare("SELECT name FROM sqlite_master WHERE type = 'table' AND name = ?")
|
||||
.get(tableName)
|
||||
);
|
||||
|
||||
const getTableInfo = (db: Database, tableName: string): TableInfoRow[] =>
|
||||
db.prepare(`PRAGMA table_info(${tableName})`).all() as TableInfoRow[];
|
||||
|
||||
/**
 * Folds the legacy `session_names` table into `sessions`.
 *
 * Two cases:
 * - `sessions` already exists: merge rows in via INSERT OR REPLACE
 *   (a legacy row overwrites an existing sessions row with the same
 *   key), then drop the legacy table.
 * - `sessions` does not exist yet: simply rename the legacy table.
 */
const migrateLegacySessionNames = (db: Database): void => {
  const hasLegacySessionNamesTable = tableExists(db, 'session_names');
  const hasSessionsTable = tableExists(db, 'sessions');

  // Nothing to do when the legacy table is absent.
  if (!hasLegacySessionNamesTable) {
    return;
  }

  if (hasSessionsTable) {
    console.log('Running migration: Merging session_names into sessions');
    // Provider defaults to 'claude' and timestamps default to now for
    // legacy rows that predate those columns.
    db.exec(`
      INSERT OR REPLACE INTO sessions (session_id, provider, custom_name, created_at, updated_at)
      SELECT
        session_id,
        COALESCE(provider, 'claude'),
        custom_name,
        COALESCE(created_at, CURRENT_TIMESTAMP),
        COALESCE(updated_at, CURRENT_TIMESTAMP)
      FROM session_names
    `);
    db.exec('DROP TABLE session_names');
    return;
  }

  console.log('Running migration: Renaming session_names table to sessions');
  db.exec('ALTER TABLE session_names RENAME TO sessions');
};
|
||||
|
||||
/**
 * Ensures the `projects` table exists and copies rows from the legacy
 * `workspace_original_paths` table into it.
 *
 * Rows with a blank/NULL workspace_id get a freshly generated id; rows
 * with a blank workspace_path are skipped. On a project_path collision
 * the existing projects values win when non-NULL (COALESCE prefers the
 * current row over the excluded one).
 *
 * NOTE(review): the legacy table is not dropped here — that happens
 * later in runMigrations, after the session migrations complete.
 */
const migrateLegacyWorkspaceTableIntoProjects = (db: Database): void => {
  db.exec(PROJECTS_TABLE_SCHEMA_SQL);

  if (!tableExists(db, 'workspace_original_paths')) {
    return;
  }

  console.log('Running migration: Migrating workspace_original_paths data into projects');
  db.exec(`
    INSERT INTO projects (project_id, project_path, custom_project_name, isStarred, isArchived)
    SELECT
      CASE
        WHEN workspace_id IS NULL OR trim(workspace_id) = ''
        THEN ${SQLITE_UUID_SQL}
        ELSE workspace_id
      END,
      workspace_path,
      custom_workspace_name,
      COALESCE(isStarred, 0),
      0
    FROM workspace_original_paths
    WHERE workspace_path IS NOT NULL AND trim(workspace_path) <> ''
    ON CONFLICT(project_path) DO UPDATE SET
      custom_project_name = COALESCE(projects.custom_project_name, excluded.custom_project_name),
      isStarred = COALESCE(projects.isStarred, excluded.isStarred)
  `);
};
|
||||
|
||||
/**
 * Brings the `sessions` table to the project-based schema: composite
 * PRIMARY KEY (session_id, provider) plus project_path / jsonl_path /
 * timestamp columns.
 *
 * Fast path: when the table already has project_path and the expected
 * composite key, only the auxiliary columns are topped up and the
 * timestamps backfilled. Otherwise the table is rebuilt through the
 * standard SQLite create-new / copy / drop / rename sequence inside a
 * transaction, with foreign keys temporarily disabled.
 */
const rebuildSessionsTableWithProjectSchema = (db: Database): void => {
  const hasSessions = tableExists(db, 'sessions');
  if (!hasSessions) {
    // Fresh install: create the table directly with the final schema.
    db.exec(SESSIONS_TABLE_SCHEMA_SQL);
    return;
  }

  const sessionsTableInfo = getTableInfo(db, 'sessions');
  const columnNames = sessionsTableInfo.map((column) => column.name);
  // PRAGMA table_info `pk` is the 1-based position within the primary
  // key (0 when the column is not part of it), so sorting by `pk`
  // recovers the key's column order.
  const primaryKeyColumns = sessionsTableInfo
    .filter((column) => column.pk > 0)
    .sort((a, b) => a.pk - b.pk)
    .map((column) => column.name);

  // Rebuild when project_path is missing or the primary key is not
  // exactly (session_id, provider) in that order.
  const shouldRebuild =
    !columnNames.includes('project_path') ||
    primaryKeyColumns.length !== 2 ||
    primaryKeyColumns[0] !== 'session_id' ||
    primaryKeyColumns[1] !== 'provider';

  if (!shouldRebuild) {
    // Schema shape already correct — add any missing auxiliary columns
    // and backfill NULL timestamps.
    addColumnToTableIfNotExists(db, 'sessions', columnNames, 'jsonl_path', 'TEXT');
    addColumnToTableIfNotExists(db, 'sessions', columnNames, 'created_at', 'DATETIME');
    addColumnToTableIfNotExists(db, 'sessions', columnNames, 'updated_at', 'DATETIME');
    db.exec('UPDATE sessions SET created_at = COALESCE(created_at, CURRENT_TIMESTAMP)');
    db.exec('UPDATE sessions SET updated_at = COALESCE(updated_at, CURRENT_TIMESTAMP)');
    return;
  }

  console.log('Running migration: Rebuilding sessions table to project-based schema');

  // Build SELECT expressions that tolerate whatever legacy columns
  // happen to exist in the old table.
  const projectPathExpression = columnNames.includes('project_path')
    ? 'project_path'
    : columnNames.includes('workspace_path')
      ? 'workspace_path'
      : 'NULL';

  const providerExpression = columnNames.includes('provider')
    ? "COALESCE(provider, 'claude')"
    : "'claude'";

  const customNameExpression = columnNames.includes('custom_name')
    ? 'custom_name'
    : 'NULL';

  const jsonlPathExpression = columnNames.includes('jsonl_path')
    ? 'jsonl_path'
    : 'NULL';

  const createdAtExpression = columnNames.includes('created_at')
    ? 'COALESCE(created_at, CURRENT_TIMESTAMP)'
    : 'CURRENT_TIMESTAMP';

  const updatedAtExpression = columnNames.includes('updated_at')
    ? 'COALESCE(updated_at, CURRENT_TIMESTAMP)'
    : 'CURRENT_TIMESTAMP';

  // Disable FK enforcement during the table swap so the drop/rename
  // does not trip the projects(project_path) reference.
  db.exec('PRAGMA foreign_keys = OFF');
  try {
    db.exec('BEGIN TRANSACTION');
    db.exec('DROP TABLE IF EXISTS sessions__new');
    db.exec(`
      CREATE TABLE sessions__new (
        session_id TEXT NOT NULL,
        provider TEXT NOT NULL DEFAULT 'claude',
        custom_name TEXT,
        project_path TEXT,
        jsonl_path TEXT,
        created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
        updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
        PRIMARY KEY (session_id, provider),
        FOREIGN KEY (project_path) REFERENCES projects(project_path)
          ON DELETE SET NULL
          ON UPDATE CASCADE
      )
    `);
    // INSERT OR REPLACE: duplicate (session_id, provider) pairs in the
    // old data collapse to the last row scanned; blank session_ids are
    // dropped entirely.
    db.exec(`
      INSERT OR REPLACE INTO sessions__new (
        session_id,
        provider,
        custom_name,
        project_path,
        jsonl_path,
        created_at,
        updated_at
      )
      SELECT
        session_id,
        ${providerExpression},
        ${customNameExpression},
        ${projectPathExpression},
        ${jsonlPathExpression},
        ${createdAtExpression},
        ${updatedAtExpression}
      FROM sessions
      WHERE session_id IS NOT NULL AND trim(session_id) <> ''
    `);
    db.exec('DROP TABLE sessions');
    db.exec('ALTER TABLE sessions__new RENAME TO sessions');
    db.exec('COMMIT');
  } catch (migrationError) {
    db.exec('ROLLBACK');
    throw migrationError;
  } finally {
    // Always restore FK enforcement, even when the rebuild failed.
    db.exec('PRAGMA foreign_keys = ON');
  }
};
|
||||
|
||||
/**
 * Backfills a `projects` row for every non-blank sessions.project_path
 * that does not already have one, so the sessions → projects foreign
 * key has a target for legacy data. Duplicate/existing paths are
 * skipped via ON CONFLICT DO NOTHING.
 */
const ensureProjectsForSessionPaths = (db: Database): void => {
  if (!tableExists(db, 'sessions')) {
    return;
  }

  db.exec(`
    INSERT INTO projects (project_id, project_path, custom_project_name, isStarred, isArchived)
    SELECT
      ${SQLITE_UUID_SQL},
      project_path,
      NULL,
      0,
      0
    FROM sessions
    WHERE project_path IS NOT NULL AND trim(project_path) <> ''
    ON CONFLICT(project_path) DO NOTHING
  `);
};
|
||||
|
||||
export const runMigrations = (db: Database) => {
|
||||
try {
|
||||
const usersTableInfo = db.prepare('PRAGMA table_info(users)').all() as { name: string }[];
|
||||
const userColumnNames = usersTableInfo.map((column) => column.name);
|
||||
|
||||
addColumnToTableIfNotExists(db, 'users', userColumnNames, 'git_name', 'TEXT');
|
||||
addColumnToTableIfNotExists(db, 'users', userColumnNames, 'git_email', 'TEXT');
|
||||
addColumnToTableIfNotExists(
|
||||
db,
|
||||
'users',
|
||||
userColumnNames,
|
||||
'has_completed_onboarding',
|
||||
'BOOLEAN DEFAULT 0'
|
||||
);
|
||||
|
||||
db.exec(APP_CONFIG_TABLE_SCHEMA_SQL);
|
||||
db.exec(USER_NOTIFICATION_PREFERENCES_TABLE_SCHEMA_SQL);
|
||||
db.exec(VAPID_KEYS_TABLE_SCHEMA_SQL);
|
||||
db.exec(PUSH_SUBSCRIPTIONS_TABLE_SCHEMA_SQL);
|
||||
db.exec('CREATE INDEX IF NOT EXISTS idx_push_subscriptions_user_id ON push_subscriptions(user_id)');
|
||||
|
||||
db.exec(PROJECTS_TABLE_SCHEMA_SQL);
|
||||
const projectsTableInfo = getTableInfo(db, 'projects');
|
||||
const projectColumnNames = projectsTableInfo.map((column) => column.name);
|
||||
addColumnToTableIfNotExists(db, 'projects', projectColumnNames, 'custom_project_name', 'TEXT DEFAULT NULL');
|
||||
addColumnToTableIfNotExists(db, 'projects', projectColumnNames, 'project_id', 'TEXT');
|
||||
addColumnToTableIfNotExists(db, 'projects', projectColumnNames, 'isStarred', 'BOOLEAN DEFAULT 0');
|
||||
addColumnToTableIfNotExists(db, 'projects', projectColumnNames, 'isArchived', 'BOOLEAN DEFAULT 0');
|
||||
db.exec(`
|
||||
UPDATE projects
|
||||
SET project_id = ${SQLITE_UUID_SQL}
|
||||
WHERE project_id IS NULL OR trim(project_id) = ''
|
||||
`);
|
||||
|
||||
migrateLegacyWorkspaceTableIntoProjects(db);
|
||||
migrateLegacySessionNames(db);
|
||||
rebuildSessionsTableWithProjectSchema(db);
|
||||
ensureProjectsForSessionPaths(db);
|
||||
|
||||
db.exec('CREATE INDEX IF NOT EXISTS idx_session_ids_lookup ON sessions(session_id)');
|
||||
db.exec('CREATE INDEX IF NOT EXISTS idx_sessions_project_path ON sessions(project_path)');
|
||||
db.exec('CREATE INDEX IF NOT EXISTS idx_projects_is_starred ON projects(isStarred)');
|
||||
db.exec('CREATE INDEX IF NOT EXISTS idx_projects_is_archived ON projects(isArchived)');
|
||||
|
||||
db.exec('DROP INDEX IF EXISTS idx_session_names_lookup');
|
||||
db.exec('DROP INDEX IF EXISTS idx_sessions_workspace_path');
|
||||
db.exec('DROP INDEX IF EXISTS idx_workspace_original_paths_is_starred');
|
||||
db.exec('DROP INDEX IF EXISTS idx_workspace_original_paths_workspace_id');
|
||||
|
||||
if (tableExists(db, 'workspace_original_paths')) {
|
||||
console.log('Running migration: Dropping legacy workspace_original_paths table');
|
||||
db.exec('DROP TABLE workspace_original_paths');
|
||||
}
|
||||
|
||||
db.exec(LAST_SCANNED_AT_SQL);
|
||||
console.log('Database migrations completed successfully');
|
||||
} catch (error: any) {
|
||||
console.error('Error running migrations:', error.message);
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
119
server/modules/database/repositories/api-keys.ts
Normal file
119
server/modules/database/repositories/api-keys.ts
Normal file
@@ -0,0 +1,119 @@
|
||||
/**
|
||||
* API keys repository.
|
||||
*
|
||||
* Manages API keys used for external/programmatic access to the backend.
|
||||
* Keys are prefixed with `ck_` and tied to a user via foreign key.
|
||||
*/
|
||||
|
||||
import crypto from 'crypto';
|
||||
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
|
||||
// Row shape returned when listing a user's API keys.
type ApiKeyRow = {
  id: number;
  key_name: string;
  api_key: string;             // raw key value, as stored
  created_at: string;
  last_used: string | null;    // null when the key has never been used
  is_active: number;           // SQLite boolean: 1 active, 0 disabled
};

// Returned from createApiKey for one-time display of the raw key.
type CreateApiKeyResult = {
  id: number | bigint;         // lastInsertRowid may be a bigint
  keyName: string;
  apiKey: string;
};

// User identity resolved from a valid API key (see validateApiKey).
type ValidatedApiKeyUser = {
  id: number;
  username: string;
  api_key_id: number;          // id of the matching api_keys row
};
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Generates a cryptographically random API key with the `ck_` prefix. */
|
||||
function generateApiKey(): string {
|
||||
return 'ck_' + crypto.randomBytes(32).toString('hex');
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
// Queries
// ---------------------------------------------------------------------------

export const apiKeysDb = {
  // Re-exported so callers can mint a key without inserting it.
  generateApiKey,

  /** Creates a new API key for the given user and returns it for one-time display. */
  createApiKey(userId: number, keyName: string): CreateApiKeyResult {
    const db = getConnection();
    const apiKey = generateApiKey();
    // NOTE(review): the raw key is stored as-is (no hashing), matching
    // the equality lookup in validateApiKey — confirm this is intended.
    const result = db
      .prepare(
        'INSERT INTO api_keys (user_id, key_name, api_key) VALUES (?, ?, ?)'
      )
      .run(userId, keyName, apiKey);
    return { id: result.lastInsertRowid, keyName, apiKey };
  },

  /** Lists all API keys for a user, most recent first. */
  getApiKeys(userId: number): ApiKeyRow[] {
    const db = getConnection();
    return db
      .prepare(
        'SELECT id, key_name, api_key, created_at, last_used, is_active FROM api_keys WHERE user_id = ? ORDER BY created_at DESC'
      )
      .all(userId) as ApiKeyRow[];
  },

  /**
   * Validates an API key and resolves the owning user.
   * If the key is valid, its `last_used` timestamp is updated as a side effect.
   * Returns undefined when the key is invalid or the user is inactive.
   */
  validateApiKey(apiKey: string): ValidatedApiKeyUser | undefined {
    const db = getConnection();
    const row = db
      .prepare(
        `SELECT u.id, u.username, ak.id as api_key_id
         FROM api_keys ak
         JOIN users u ON ak.user_id = u.id
         WHERE ak.api_key = ? AND ak.is_active = 1 AND u.is_active = 1`
      )
      .get(apiKey) as ValidatedApiKeyUser | undefined;

    if (row) {
      // Side effect: record when this key was last seen.
      db.prepare(
        'UPDATE api_keys SET last_used = CURRENT_TIMESTAMP WHERE id = ?'
      ).run(row.api_key_id);
    }

    return row;
  },

  /** Permanently removes an API key. Returns true if a row was deleted. */
  deleteApiKey(userId: number, apiKeyId: number): boolean {
    const db = getConnection();
    const result = db
      .prepare('DELETE FROM api_keys WHERE id = ? AND user_id = ?')
      .run(apiKeyId, userId);
    return result.changes > 0;
  },

  /** Enables or disables an API key without deleting it. */
  toggleApiKey(
    userId: number,
    apiKeyId: number,
    isActive: boolean
  ): boolean {
    const db = getConnection();
    const result = db
      .prepare(
        'UPDATE api_keys SET is_active = ? WHERE id = ? AND user_id = ?'
      )
      .run(isActive ? 1 : 0, apiKeyId, userId);
    return result.changes > 0;
  },
};
|
||||
53
server/modules/database/repositories/app-config.ts
Normal file
53
server/modules/database/repositories/app-config.ts
Normal file
@@ -0,0 +1,53 @@
|
||||
/**
|
||||
* App config repository.
|
||||
*
|
||||
* Key-value store for application-level configuration that persists
|
||||
* across restarts (JWT secret, feature flags, etc.). Values are always
|
||||
* stored as strings; callers handle parsing.
|
||||
*/
|
||||
|
||||
import crypto from 'crypto';
|
||||
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Queries
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const appConfigDb = {
|
||||
/** Returns the stored value for a config key, or null if missing. */
|
||||
get(key: string): string | null {
|
||||
try {
|
||||
const db = getConnection();
|
||||
const row = db
|
||||
.prepare('SELECT value FROM app_config WHERE key = ?')
|
||||
.get(key) as { value: string } | undefined;
|
||||
return row?.value ?? null;
|
||||
} catch {
|
||||
// Swallow errors so early-startup reads (e.g. JWT secret) do not crash.
|
||||
return null;
|
||||
}
|
||||
},
|
||||
|
||||
/** Inserts or updates a config key (upsert). */
|
||||
set(key: string, value: string): void {
|
||||
const db = getConnection();
|
||||
db.prepare(
|
||||
'INSERT INTO app_config (key, value) VALUES (?, ?) ON CONFLICT(key) DO UPDATE SET value = excluded.value'
|
||||
).run(key, value);
|
||||
},
|
||||
|
||||
/**
|
||||
* Returns the JWT signing secret, generating and persisting one
|
||||
* if it does not already exist. This ensures the secret survives
|
||||
* server restarts while being created automatically on first boot.
|
||||
*/
|
||||
getOrCreateJwtSecret(): string {
|
||||
let secret = appConfigDb.get('jwt_secret');
|
||||
if (!secret) {
|
||||
secret = crypto.randomBytes(64).toString('hex');
|
||||
appConfigDb.set('jwt_secret', secret);
|
||||
}
|
||||
return secret;
|
||||
},
|
||||
};
|
||||
106
server/modules/database/repositories/credentials.ts
Normal file
106
server/modules/database/repositories/credentials.ts
Normal file
@@ -0,0 +1,106 @@
|
||||
/**
|
||||
* User credentials repository.
|
||||
*
|
||||
* Manages external service tokens (GitHub, GitLab, Bitbucket, etc.)
|
||||
* stored per-user. Each credential has a type discriminator so multiple
|
||||
* credential kinds can coexist in the same table.
|
||||
*/
|
||||
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
import type {
|
||||
CreateCredentialResult,
|
||||
CredentialPublicRow,
|
||||
} from '@/shared/types.js';
|
||||
|
||||
// ---------------------------------------------------------------------------
// Queries
// ---------------------------------------------------------------------------

export const credentialsDb = {
  /** Stores a new credential and returns a safe (no raw value) result. */
  createCredential(
    userId: number,
    credentialName: string,
    credentialType: string,
    credentialValue: string,
    description: string | null = null
  ): CreateCredentialResult {
    const db = getConnection();
    // NOTE(review): credential_value is inserted exactly as given — no
    // encryption is visible here; confirm values are protected upstream
    // if that is required.
    const result = db
      .prepare(
        'INSERT INTO user_credentials (user_id, credential_name, credential_type, credential_value, description) VALUES (?, ?, ?, ?, ?)'
      )
      .run(userId, credentialName, credentialType, credentialValue, description);
    return {
      id: result.lastInsertRowid,
      credentialName,
      credentialType,
    };
  },

  /**
   * Lists credentials for a user (excluding raw values).
   * Optionally filters by credential type (e.g. 'github_token').
   */
  getCredentials(
    userId: number,
    credentialType: string | null = null
  ): CredentialPublicRow[] {
    const db = getConnection();

    if (credentialType) {
      return db
        .prepare(
          'SELECT id, credential_name, credential_type, description, created_at, is_active FROM user_credentials WHERE user_id = ? AND credential_type = ? ORDER BY created_at DESC'
        )
        .all(userId, credentialType) as CredentialPublicRow[];
    }

    return db
      .prepare(
        'SELECT id, credential_name, credential_type, description, created_at, is_active FROM user_credentials WHERE user_id = ? ORDER BY created_at DESC'
      )
      .all(userId) as CredentialPublicRow[];
  },

  /**
   * Returns the raw credential value for the most recent active
   * credential of the given type, or null if none exists.
   */
  getActiveCredential(
    userId: number,
    credentialType: string
  ): string | null {
    const db = getConnection();
    const row = db
      .prepare(
        'SELECT credential_value FROM user_credentials WHERE user_id = ? AND credential_type = ? AND is_active = 1 ORDER BY created_at DESC LIMIT 1'
      )
      .get(userId, credentialType) as { credential_value: string } | undefined;
    return row?.credential_value ?? null;
  },

  /** Permanently removes a credential. Returns true if a row was deleted. */
  deleteCredential(userId: number, credentialId: number): boolean {
    const db = getConnection();
    const result = db
      .prepare('DELETE FROM user_credentials WHERE id = ? AND user_id = ?')
      .run(credentialId, userId);
    return result.changes > 0;
  },

  /** Enables or disables a credential without deleting it. */
  toggleCredential(
    userId: number,
    credentialId: number,
    isActive: boolean
  ): boolean {
    const db = getConnection();
    const result = db
      .prepare(
        'UPDATE user_credentials SET is_active = ? WHERE id = ? AND user_id = ?'
      )
      .run(isActive ? 1 : 0, credentialId, userId);
    return result.changes > 0;
  },
};
|
||||
100
server/modules/database/repositories/github-tokens.ts
Normal file
100
server/modules/database/repositories/github-tokens.ts
Normal file
@@ -0,0 +1,100 @@
|
||||
/**
|
||||
* GitHub tokens repository.
|
||||
*
|
||||
* Backward-compatible helper layer over generic credentials storage.
|
||||
* Tokens are stored in `user_credentials` with `credential_type = 'github_token'`.
|
||||
*/
|
||||
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
import { credentialsDb } from '@/modules/database/repositories/credentials.js';
|
||||
import type {
|
||||
CredentialPublicRow,
|
||||
CreateCredentialResult,
|
||||
} from '@/shared/types.js';
|
||||
|
||||
// Discriminator value used for GitHub tokens in user_credentials.
const GITHUB_TOKEN_TYPE = 'github_token';

// Full user_credentials row (includes the raw value; internal use only).
type CredentialRow = {
  id: number;
  user_id: number;
  credential_name: string;
  credential_type: string;
  credential_value: string;    // the raw token
  description: string | null;
  created_at: string;
  is_active: number;           // SQLite boolean: 1 active, 0 disabled
};

// Credential row plus the legacy `github_token` field expected by
// pre-refactor callers.
type GithubTokenLookup = CredentialRow & {
  github_token: string;        // alias of credential_value
};
|
||||
|
||||
export const githubTokensDb = {
|
||||
/** Creates a GitHub token credential entry. */
|
||||
createGithubToken(
|
||||
userId: number,
|
||||
tokenName: string,
|
||||
githubToken: string,
|
||||
description: string | null = null
|
||||
): CreateCredentialResult {
|
||||
return credentialsDb.createCredential(
|
||||
userId,
|
||||
tokenName,
|
||||
GITHUB_TOKEN_TYPE,
|
||||
githubToken,
|
||||
description
|
||||
);
|
||||
},
|
||||
|
||||
/** Returns all GitHub tokens (safe shape: no credential value). */
|
||||
getGithubTokens(userId: number): CredentialPublicRow[] {
|
||||
return credentialsDb.getCredentials(userId, GITHUB_TOKEN_TYPE);
|
||||
},
|
||||
|
||||
/** Returns the most recent active GitHub token value for a user. */
|
||||
getActiveGithubToken(userId: number): string | null {
|
||||
return credentialsDb.getActiveCredential(userId, GITHUB_TOKEN_TYPE);
|
||||
},
|
||||
|
||||
/**
|
||||
* Returns a specific active GitHub token row by id/user, including
|
||||
* a `github_token` compatibility field.
|
||||
*/
|
||||
getGithubTokenById(userId: number, tokenId: number): GithubTokenLookup | null {
|
||||
const db = getConnection();
|
||||
const row = db
|
||||
.prepare(
|
||||
`SELECT *
|
||||
FROM user_credentials
|
||||
WHERE id = ? AND user_id = ? AND credential_type = ? AND is_active = 1`
|
||||
)
|
||||
.get(tokenId, userId, GITHUB_TOKEN_TYPE) as CredentialRow | undefined;
|
||||
|
||||
if (!row) return null;
|
||||
|
||||
return {
|
||||
...row,
|
||||
github_token: row.credential_value,
|
||||
};
|
||||
},
|
||||
|
||||
/** Updates active state for a GitHub token. */
|
||||
updateGithubToken(
|
||||
userId: number,
|
||||
tokenId: number,
|
||||
isActive: boolean
|
||||
): boolean {
|
||||
return credentialsDb.toggleCredential(userId, tokenId, isActive);
|
||||
},
|
||||
|
||||
/** Deletes a GitHub token. */
|
||||
deleteGithubToken(userId: number, tokenId: number): boolean {
|
||||
return credentialsDb.deleteCredential(userId, tokenId);
|
||||
},
|
||||
|
||||
// Legacy alias used by existing routes
|
||||
toggleGithubToken(userId: number, tokenId: number, isActive: boolean): boolean {
|
||||
return githubTokensDb.updateGithubToken(userId, tokenId, isActive);
|
||||
},
|
||||
};
|
||||
|
||||
103
server/modules/database/repositories/notification-preferences.ts
Normal file
103
server/modules/database/repositories/notification-preferences.ts
Normal file
@@ -0,0 +1,103 @@
|
||||
/**
|
||||
* Notification preferences repository.
|
||||
*
|
||||
* Stores per-user notification channel/event preferences as JSON.
|
||||
*/
|
||||
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
|
||||
type NotificationPreferences = {
|
||||
channels: {
|
||||
inApp: boolean;
|
||||
webPush: boolean;
|
||||
};
|
||||
events: {
|
||||
actionRequired: boolean;
|
||||
stop: boolean;
|
||||
error: boolean;
|
||||
};
|
||||
};
|
||||
|
||||
const DEFAULT_NOTIFICATION_PREFERENCES: NotificationPreferences = {
|
||||
channels: {
|
||||
inApp: false,
|
||||
webPush: false,
|
||||
},
|
||||
events: {
|
||||
actionRequired: true,
|
||||
stop: true,
|
||||
error: true,
|
||||
},
|
||||
};
|
||||
|
||||
function normalizeNotificationPreferences(value: unknown): NotificationPreferences {
|
||||
const source = value && typeof value === 'object' ? (value as Record<string, any>) : {};
|
||||
|
||||
return {
|
||||
channels: {
|
||||
inApp: source.channels?.inApp === true,
|
||||
webPush: source.channels?.webPush === true,
|
||||
},
|
||||
events: {
|
||||
actionRequired: source.events?.actionRequired !== false,
|
||||
stop: source.events?.stop !== false,
|
||||
error: source.events?.error !== false,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export const notificationPreferencesDb = {
|
||||
/** Returns the normalized preferences for a user, creating defaults on first read. */
|
||||
getNotificationPreferences(userId: number): NotificationPreferences {
|
||||
const db = getConnection();
|
||||
const row = db
|
||||
.prepare(
|
||||
'SELECT preferences_json FROM user_notification_preferences WHERE user_id = ?'
|
||||
)
|
||||
.get(userId) as { preferences_json: string } | undefined;
|
||||
|
||||
if (!row) {
|
||||
const defaults = normalizeNotificationPreferences(DEFAULT_NOTIFICATION_PREFERENCES);
|
||||
db.prepare(
|
||||
'INSERT INTO user_notification_preferences (user_id, preferences_json, updated_at) VALUES (?, ?, CURRENT_TIMESTAMP)'
|
||||
).run(userId, JSON.stringify(defaults));
|
||||
return defaults;
|
||||
}
|
||||
|
||||
let parsed: unknown = DEFAULT_NOTIFICATION_PREFERENCES;
|
||||
try {
|
||||
parsed = JSON.parse(row.preferences_json);
|
||||
} catch {
|
||||
parsed = DEFAULT_NOTIFICATION_PREFERENCES;
|
||||
}
|
||||
return normalizeNotificationPreferences(parsed);
|
||||
},
|
||||
|
||||
/** Upserts normalized preferences for a user and returns the stored value. */
|
||||
updateNotificationPreferences(
|
||||
userId: number,
|
||||
preferences: unknown
|
||||
): NotificationPreferences {
|
||||
const normalized = normalizeNotificationPreferences(preferences);
|
||||
const db = getConnection();
|
||||
|
||||
db.prepare(
|
||||
`INSERT INTO user_notification_preferences (user_id, preferences_json, updated_at)
|
||||
VALUES (?, ?, CURRENT_TIMESTAMP)
|
||||
ON CONFLICT(user_id) DO UPDATE SET
|
||||
preferences_json = excluded.preferences_json,
|
||||
updated_at = CURRENT_TIMESTAMP`
|
||||
).run(userId, JSON.stringify(normalized));
|
||||
|
||||
return normalized;
|
||||
},
|
||||
|
||||
// Legacy aliases used by existing services/routes
|
||||
getPreferences(userId: number): NotificationPreferences {
|
||||
return notificationPreferencesDb.getNotificationPreferences(userId);
|
||||
},
|
||||
updatePreferences(userId: number, preferences: unknown): NotificationPreferences {
|
||||
return notificationPreferencesDb.updateNotificationPreferences(userId, preferences);
|
||||
},
|
||||
};
|
||||
|
||||
138
server/modules/database/repositories/projects.db.ts
Normal file
138
server/modules/database/repositories/projects.db.ts
Normal file
@@ -0,0 +1,138 @@
|
||||
import { randomUUID } from 'node:crypto';
|
||||
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
|
||||
type ProjectRow = {
|
||||
project_id: string;
|
||||
project_path: string;
|
||||
custom_project_name: string | null;
|
||||
isStarred: number;
|
||||
isArchived: number;
|
||||
};
|
||||
|
||||
export const projectsDb = {
|
||||
createProjectPath(projectPath: string, customProjectName: string | null = null): void {
|
||||
const db = getConnection();
|
||||
db.prepare(`
|
||||
INSERT INTO projects (project_id, project_path, custom_project_name)
|
||||
VALUES (?, ?, ?)
|
||||
ON CONFLICT(project_path) DO UPDATE SET
|
||||
custom_project_name = CASE
|
||||
WHEN projects.custom_project_name IS NULL OR projects.custom_project_name = ''
|
||||
THEN excluded.custom_project_name
|
||||
ELSE projects.custom_project_name
|
||||
END
|
||||
`).run(randomUUID(), projectPath, customProjectName);
|
||||
},
|
||||
|
||||
getProjectPath(projectPath: string): ProjectRow | null {
|
||||
const db = getConnection();
|
||||
const row = db.prepare(`
|
||||
SELECT project_id, project_path, custom_project_name, isStarred, isArchived
|
||||
FROM projects
|
||||
WHERE project_path = ?
|
||||
`).get(projectPath) as ProjectRow | undefined;
|
||||
|
||||
return row ?? null;
|
||||
},
|
||||
|
||||
getProjectById(projectId: string): ProjectRow | null {
|
||||
const db = getConnection();
|
||||
const row = db.prepare(`
|
||||
SELECT project_id, project_path, custom_project_name, isStarred, isArchived
|
||||
FROM projects
|
||||
WHERE project_id = ?
|
||||
`).get(projectId) as ProjectRow | undefined;
|
||||
|
||||
return row ?? null;
|
||||
},
|
||||
|
||||
getProjectPaths(): ProjectRow[] {
|
||||
const db = getConnection();
|
||||
return db.prepare(`
|
||||
SELECT project_id, project_path, custom_project_name, isStarred, isArchived
|
||||
FROM projects
|
||||
`).all() as ProjectRow[];
|
||||
},
|
||||
|
||||
getCustomProjectName(projectPath: string): string | null {
|
||||
const db = getConnection();
|
||||
const row = db.prepare(`
|
||||
SELECT custom_project_name
|
||||
FROM projects
|
||||
WHERE project_path = ?
|
||||
`).get(projectPath) as Pick<ProjectRow, 'custom_project_name'> | undefined;
|
||||
|
||||
return row?.custom_project_name ?? null;
|
||||
},
|
||||
|
||||
updateCustomProjectName(projectPath: string, customProjectName: string | null): void {
|
||||
const db = getConnection();
|
||||
db.prepare(`
|
||||
INSERT INTO projects (project_id, project_path, custom_project_name)
|
||||
VALUES (?, ?, ?)
|
||||
ON CONFLICT(project_path) DO UPDATE SET custom_project_name = excluded.custom_project_name
|
||||
`).run(randomUUID(), projectPath, customProjectName);
|
||||
},
|
||||
|
||||
updateCustomProjectNameById(projectId: string, customProjectName: string | null): void {
|
||||
const db = getConnection();
|
||||
db.prepare(`
|
||||
UPDATE projects
|
||||
SET custom_project_name = ?
|
||||
WHERE project_id = ?
|
||||
`).run(customProjectName, projectId);
|
||||
},
|
||||
|
||||
updateProjectIsStarred(projectPath: string, isStarred: boolean): void {
|
||||
const db = getConnection();
|
||||
db.prepare(`
|
||||
UPDATE projects
|
||||
SET isStarred = ?
|
||||
WHERE project_path = ?
|
||||
`).run(isStarred ? 1 : 0, projectPath);
|
||||
},
|
||||
|
||||
updateProjectIsStarredById(projectId: string, isStarred: boolean): void {
|
||||
const db = getConnection();
|
||||
db.prepare(`
|
||||
UPDATE projects
|
||||
SET isStarred = ?
|
||||
WHERE project_id = ?
|
||||
`).run(isStarred ? 1 : 0, projectId);
|
||||
},
|
||||
|
||||
updateProjectIsArchived(projectPath: string, isArchived: boolean): void {
|
||||
const db = getConnection();
|
||||
db.prepare(`
|
||||
UPDATE projects
|
||||
SET isArchived = ?
|
||||
WHERE project_path = ?
|
||||
`).run(isArchived ? 1 : 0, projectPath);
|
||||
},
|
||||
|
||||
updateProjectIsArchivedById(projectId: string, isArchived: boolean): void {
|
||||
const db = getConnection();
|
||||
db.prepare(`
|
||||
UPDATE projects
|
||||
SET isArchived = ?
|
||||
WHERE project_id = ?
|
||||
`).run(isArchived ? 1 : 0, projectId);
|
||||
},
|
||||
|
||||
deleteProjectPath(projectPath: string): void {
|
||||
const db = getConnection();
|
||||
db.prepare(`
|
||||
DELETE FROM projects
|
||||
WHERE project_path = ?
|
||||
`).run(projectPath);
|
||||
},
|
||||
|
||||
deleteProjectById(projectId: string): void {
|
||||
const db = getConnection();
|
||||
db.prepare(`
|
||||
DELETE FROM projects
|
||||
WHERE project_id = ?
|
||||
`).run(projectId);
|
||||
},
|
||||
};
|
||||
80
server/modules/database/repositories/push-subscriptions.ts
Normal file
80
server/modules/database/repositories/push-subscriptions.ts
Normal file
@@ -0,0 +1,80 @@
|
||||
/**
|
||||
* Push subscriptions repository.
|
||||
*
|
||||
* Persists browser push subscription endpoints and keys per user.
|
||||
*/
|
||||
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
|
||||
type PushSubscriptionLookupRow = {
|
||||
endpoint: string;
|
||||
keys_p256dh: string;
|
||||
keys_auth: string;
|
||||
};
|
||||
|
||||
export const pushSubscriptionsDb = {
|
||||
/** Upserts a push subscription endpoint for a user. */
|
||||
createPushSubscription(
|
||||
userId: number,
|
||||
endpoint: string,
|
||||
keysP256dh: string,
|
||||
keysAuth: string
|
||||
): void {
|
||||
const db = getConnection();
|
||||
db.prepare(
|
||||
`INSERT INTO push_subscriptions (user_id, endpoint, keys_p256dh, keys_auth)
|
||||
VALUES (?, ?, ?, ?)
|
||||
ON CONFLICT(endpoint) DO UPDATE SET
|
||||
user_id = excluded.user_id,
|
||||
keys_p256dh = excluded.keys_p256dh,
|
||||
keys_auth = excluded.keys_auth`
|
||||
).run(userId, endpoint, keysP256dh, keysAuth);
|
||||
},
|
||||
|
||||
/** Returns all subscriptions for a user. */
|
||||
getPushSubscriptions(userId: number): PushSubscriptionLookupRow[] {
|
||||
const db = getConnection();
|
||||
return db
|
||||
.prepare(
|
||||
'SELECT endpoint, keys_p256dh, keys_auth FROM push_subscriptions WHERE user_id = ?'
|
||||
)
|
||||
.all(userId) as PushSubscriptionLookupRow[];
|
||||
},
|
||||
|
||||
/** Deletes one subscription by endpoint. */
|
||||
deletePushSubscription(endpoint: string): void {
|
||||
const db = getConnection();
|
||||
db.prepare('DELETE FROM push_subscriptions WHERE endpoint = ?').run(endpoint);
|
||||
},
|
||||
|
||||
/** Deletes all subscriptions for a user. */
|
||||
deletePushSubscriptionsForUser(userId: number): void {
|
||||
const db = getConnection();
|
||||
db.prepare('DELETE FROM push_subscriptions WHERE user_id = ?').run(userId);
|
||||
},
|
||||
|
||||
// Legacy aliases used by existing services/routes
|
||||
saveSubscription(
|
||||
userId: number,
|
||||
endpoint: string,
|
||||
keysP256dh: string,
|
||||
keysAuth: string
|
||||
): void {
|
||||
pushSubscriptionsDb.createPushSubscription(
|
||||
userId,
|
||||
endpoint,
|
||||
keysP256dh,
|
||||
keysAuth
|
||||
);
|
||||
},
|
||||
getSubscriptions(userId: number): PushSubscriptionLookupRow[] {
|
||||
return pushSubscriptionsDb.getPushSubscriptions(userId);
|
||||
},
|
||||
removeSubscription(endpoint: string): void {
|
||||
pushSubscriptionsDb.deletePushSubscription(endpoint);
|
||||
},
|
||||
removeAllForUser(userId: number): void {
|
||||
pushSubscriptionsDb.deletePushSubscriptionsForUser(userId);
|
||||
},
|
||||
};
|
||||
|
||||
41
server/modules/database/repositories/scan-state.db.ts
Normal file
41
server/modules/database/repositories/scan-state.db.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
|
||||
type ScanStateRow = {
|
||||
last_scanned_at: string;
|
||||
};
|
||||
|
||||
export const scanStateDb = {
|
||||
getLastScannedAt() {
|
||||
const db = getConnection();
|
||||
|
||||
const row = db
|
||||
.prepare(`SELECT last_scanned_at FROM scan_state WHERE id = 1`)
|
||||
.get() as ScanStateRow;
|
||||
|
||||
if (!row) {
|
||||
return null; // Before any scan, the row is undefined.
|
||||
}
|
||||
|
||||
let lastScannedDate: Date | null = null;
|
||||
const lastScannedStr = row.last_scanned_at;
|
||||
|
||||
if (lastScannedStr) {
|
||||
// SQLite CURRENT_TIMESTAMP returns UTC in "YYYY-MM-DD HH:MM:SS" format.
|
||||
// Replace space with 'T' and append 'Z' to parse reliably in JS across all platforms.
|
||||
lastScannedDate = new Date(lastScannedStr.replace(' ', 'T') + 'Z');
|
||||
}
|
||||
|
||||
return lastScannedDate;
|
||||
},
|
||||
|
||||
updateLastScannedAt() {
|
||||
const db = getConnection();
|
||||
|
||||
db.prepare(`
|
||||
INSERT INTO scan_state (id, last_scanned_at)
|
||||
VALUES (1, CURRENT_TIMESTAMP)
|
||||
ON CONFLICT (id)
|
||||
DO UPDATE SET last_scanned_at = CURRENT_TIMESTAMP
|
||||
`).run();
|
||||
}
|
||||
};
|
||||
192
server/modules/database/repositories/sessions.db.ts
Normal file
192
server/modules/database/repositories/sessions.db.ts
Normal file
@@ -0,0 +1,192 @@
|
||||
import path from 'node:path';
|
||||
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
import { projectsDb } from '@/modules/database/repositories/projects.db.js';
|
||||
|
||||
type SessionNameLookupRow = {
|
||||
session_id: string;
|
||||
custom_name: string;
|
||||
};
|
||||
|
||||
type SessionRow = {
|
||||
session_id: string;
|
||||
provider: string;
|
||||
project_path: string | null;
|
||||
jsonl_path: string | null;
|
||||
custom_name: string | null;
|
||||
created_at: string;
|
||||
updated_at: string;
|
||||
};
|
||||
|
||||
type SessionMetadataLookupRow = Pick<
|
||||
SessionRow,
|
||||
'session_id' | 'provider' | 'project_path' | 'jsonl_path' | 'custom_name' | 'created_at' | 'updated_at'
|
||||
>;
|
||||
|
||||
function normalizeTimestamp(value?: string): string | null {
|
||||
if (!value) return null;
|
||||
|
||||
const parsed = new Date(value);
|
||||
if (Number.isNaN(parsed.getTime())) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return parsed.toISOString();
|
||||
}
|
||||
|
||||
function normalizeCodexProjectPath(projectPath: string): string {
|
||||
const trimmedPath = projectPath.trim();
|
||||
if (!trimmedPath) {
|
||||
return projectPath;
|
||||
}
|
||||
|
||||
if (process.platform !== 'win32') {
|
||||
return path.normalize(trimmedPath);
|
||||
}
|
||||
|
||||
let strippedPath = trimmedPath;
|
||||
if (strippedPath.startsWith('\\\\?\\UNC\\')) {
|
||||
strippedPath = `\\\\${strippedPath.slice('\\\\?\\UNC\\'.length)}`;
|
||||
} else if (strippedPath.startsWith('\\\\?\\')) {
|
||||
strippedPath = strippedPath.slice('\\\\?\\'.length);
|
||||
}
|
||||
|
||||
return path.win32.normalize(strippedPath);
|
||||
}
|
||||
|
||||
function normalizeProjectPathForProvider(provider: string, projectPath: string): string {
|
||||
if (provider !== 'codex') {
|
||||
return projectPath;
|
||||
}
|
||||
|
||||
return normalizeCodexProjectPath(projectPath);
|
||||
}
|
||||
|
||||
/**
 * Sessions repository: metadata for CLI sessions keyed by
 * (session_id, provider) — provider, project path, JSONL transcript path,
 * optional custom display name, timestamps.
 */
export const sessionsDb = {
  /**
   * Inserts or updates a session row.
   *
   * Normalizes the provider-specific project path, ensures the project row
   * exists (FK target), then upserts on (session_id, provider). On conflict,
   * path/jsonl/updated_at are overwritten but the custom name is only
   * replaced when a new one is supplied (COALESCE keeps the old value).
   * Missing or unparseable timestamps fall back to CURRENT_TIMESTAMP in SQL.
   */
  createSession(
    sessionId: string,
    provider: string,
    projectPath: string,
    customName?: string,
    createdAt?: string,
    updatedAt?: string,
    jsonlPath?: string | null
  ): void {
    const db = getConnection();
    const createdAtValue = normalizeTimestamp(createdAt);
    const updatedAtValue = normalizeTimestamp(updatedAt);
    const normalizedProjectPath = normalizeProjectPathForProvider(provider, projectPath);

    // First, ensure the project path is recorded in the projects table,
    // since it's a foreign key in the sessions table.
    projectsDb.createProjectPath(normalizedProjectPath);

    db.prepare(
      `INSERT INTO sessions (session_id, provider, custom_name, project_path, jsonl_path, created_at, updated_at)
       VALUES (?, ?, ?, ?, ?, COALESCE(?, CURRENT_TIMESTAMP), COALESCE(?, CURRENT_TIMESTAMP))
       ON CONFLICT(session_id, provider) DO UPDATE SET
         updated_at = excluded.updated_at,
         project_path = excluded.project_path,
         jsonl_path = excluded.jsonl_path,
         custom_name = COALESCE(excluded.custom_name, sessions.custom_name)`
    ).run(
      sessionId,
      provider,
      customName ?? null,
      normalizedProjectPath,
      jsonlPath ?? null,
      createdAtValue,
      updatedAtValue
    );
  },

  /** Sets the custom name for a session id across ALL providers (no provider filter). */
  updateSessionCustomName(sessionId: string, customName: string): void {
    const db = getConnection();
    db.prepare(
      `UPDATE sessions
       SET custom_name = ?
       WHERE session_id = ?`
    ).run(customName, sessionId);
  },

  /**
   * Sets the custom name for one (session_id, provider) pair.
   * NOTE: despite the name, this is an UPDATE — it does not insert a row.
   */
  createSessionName(sessionId: string, provider: string, customName: string): void {
    const db = getConnection();
    db.prepare(
      `UPDATE sessions
       SET custom_name = ?
       WHERE session_id = ? AND provider = ?`
    ).run(customName, sessionId, provider);
  },

  /**
   * Returns metadata for a session id. If the same id exists under multiple
   * providers, the most recently updated row wins (ORDER BY + LIMIT 1).
   */
  getSessionById(sessionId: string): SessionMetadataLookupRow | null {
    const db = getConnection();
    const row = db
      .prepare(
        `SELECT session_id, provider, project_path, jsonl_path, custom_name, created_at, updated_at
         FROM sessions
         WHERE session_id = ?
         ORDER BY updated_at DESC
         LIMIT 1`
      )
      .get(sessionId) as SessionMetadataLookupRow | undefined;

    return row ?? null;
  },

  /** Returns every session row across all providers and projects. */
  getAllSessions(): SessionRow[] {
    const db = getConnection();
    return db
      .prepare(
        `SELECT session_id, provider, project_path, jsonl_path, custom_name, created_at, updated_at
         FROM sessions`
      )
      .all() as SessionRow[];
  },

  /** Returns all sessions recorded for one project path. */
  getSessionsByProjectPath(projectPath: string): SessionRow[] {
    const db = getConnection();
    return db
      .prepare(
        `SELECT session_id, provider, project_path, jsonl_path, custom_name, created_at, updated_at
         FROM sessions
         WHERE project_path = ?`
      )
      .all(projectPath) as SessionRow[];
  },

  /** Returns the custom name for one (session_id, provider) pair, or null. */
  getSessionName(sessionId: string, provider: string): string | null {
    const db = getConnection();
    const row = db
      .prepare(
        `SELECT custom_name
         FROM sessions
         WHERE session_id = ? AND provider = ?`
      )
      .get(sessionId, provider) as { custom_name: string | null } | undefined;

    return row?.custom_name ?? null;
  },

  /**
   * Batch lookup: maps session_id -> custom_name for the given ids under one
   * provider. Rows without a custom name are omitted from the result.
   */
  getSessionNames(sessionIds: string[], provider: string): Map<string, string> {
    if (sessionIds.length === 0) return new Map();

    const db = getConnection();
    // One placeholder per id for the IN clause.
    const placeholders = sessionIds.map(() => '?').join(',');
    const rows = db
      .prepare(
        `SELECT session_id, custom_name
         FROM sessions
         WHERE session_id IN (${placeholders})
           AND provider = ?
           AND custom_name IS NOT NULL`
      )
      .all(...sessionIds, provider) as SessionNameLookupRow[];

    return new Map(rows.map((row) => [row.session_id, row.custom_name]));
  },

  /** Deletes every row for a session id (all providers). */
  deleteSession(sessionId: string): void {
    const db = getConnection();
    db.prepare('DELETE FROM sessions WHERE session_id = ?').run(sessionId);
  },
};
|
||||
140
server/modules/database/repositories/users.ts
Normal file
140
server/modules/database/repositories/users.ts
Normal file
@@ -0,0 +1,140 @@
|
||||
/**
|
||||
* User repository.
|
||||
*
|
||||
* Provides typed CRUD operations for the `users` table.
|
||||
* This is a single-user system, but the schema supports multiple
|
||||
* users for forward compatibility.
|
||||
*/
|
||||
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
|
||||
type UserRow = {
|
||||
id: number;
|
||||
username: string;
|
||||
password_hash: string;
|
||||
created_at: string;
|
||||
last_login: string | null;
|
||||
is_active: number;
|
||||
git_name: string | null;
|
||||
git_email: string | null;
|
||||
has_completed_onboarding: number;
|
||||
};
|
||||
|
||||
type UserPublicRow = Pick<UserRow, 'id' | 'username' | 'created_at' | 'last_login'>;
|
||||
|
||||
type UserGitConfig = {
|
||||
git_name: string | null;
|
||||
git_email: string | null;
|
||||
};
|
||||
|
||||
type CreateUserResult = {
|
||||
id: number | bigint;
|
||||
username: string;
|
||||
};
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Queries
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const userDb = {
|
||||
/** Returns true if at least one user exists in the database. */
|
||||
hasUsers(): boolean {
|
||||
const db = getConnection();
|
||||
const row = db.prepare('SELECT COUNT(*) as count FROM users').get() as {
|
||||
count: number;
|
||||
};
|
||||
return row.count > 0;
|
||||
},
|
||||
|
||||
/** Inserts a new user and returns the created ID + username. */
|
||||
createUser(username: string, passwordHash: string): CreateUserResult {
|
||||
const db = getConnection();
|
||||
const result = db
|
||||
.prepare('INSERT INTO users (username, password_hash) VALUES (?, ?)')
|
||||
.run(username, passwordHash);
|
||||
return { id: result.lastInsertRowid, username };
|
||||
},
|
||||
|
||||
/**
|
||||
* Looks up an active user by username.
|
||||
* Returns the full row (including password hash) for auth verification.
|
||||
*/
|
||||
getUserByUsername(username: string): UserRow | undefined {
|
||||
const db = getConnection();
|
||||
return db
|
||||
.prepare('SELECT * FROM users WHERE username = ? AND is_active = 1')
|
||||
.get(username) as UserRow | undefined;
|
||||
},
|
||||
|
||||
/** Updates the last_login timestamp. Non-fatal — logs but does not throw. */
|
||||
updateLastLogin(userId: number): void {
|
||||
try {
|
||||
const db = getConnection();
|
||||
db.prepare(
|
||||
'UPDATE users SET last_login = CURRENT_TIMESTAMP WHERE id = ?'
|
||||
).run(userId);
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
console.error('Failed to update last login', { error: message });
|
||||
}
|
||||
},
|
||||
|
||||
/** Returns public user fields by ID (no password hash). */
|
||||
getUserById(userId: number): UserPublicRow | undefined {
|
||||
const db = getConnection();
|
||||
return db
|
||||
.prepare(
|
||||
'SELECT id, username, created_at, last_login FROM users WHERE id = ? AND is_active = 1'
|
||||
)
|
||||
.get(userId) as UserPublicRow | undefined;
|
||||
},
|
||||
|
||||
/** Returns the first active user. Used for single-user mode lookups. */
|
||||
getFirstUser(): UserPublicRow | undefined {
|
||||
const db = getConnection();
|
||||
return db
|
||||
.prepare(
|
||||
'SELECT id, username, created_at, last_login FROM users WHERE is_active = 1 LIMIT 1'
|
||||
)
|
||||
.get() as UserPublicRow | undefined;
|
||||
},
|
||||
|
||||
/** Stores the user's preferred git name and email. */
|
||||
updateGitConfig(
|
||||
userId: number,
|
||||
gitName: string,
|
||||
gitEmail: string
|
||||
): void {
|
||||
const db = getConnection();
|
||||
db.prepare('UPDATE users SET git_name = ?, git_email = ? WHERE id = ?').run(
|
||||
gitName,
|
||||
gitEmail,
|
||||
userId
|
||||
);
|
||||
},
|
||||
|
||||
/** Retrieves the user's git identity (name + email). */
|
||||
getGitConfig(userId: number): UserGitConfig | undefined {
|
||||
const db = getConnection();
|
||||
return db
|
||||
.prepare('SELECT git_name, git_email FROM users WHERE id = ?')
|
||||
.get(userId) as UserGitConfig | undefined;
|
||||
},
|
||||
|
||||
/** Marks onboarding as complete for the given user. */
|
||||
completeOnboarding(userId: number): void {
|
||||
const db = getConnection();
|
||||
db.prepare(
|
||||
'UPDATE users SET has_completed_onboarding = 1 WHERE id = ?'
|
||||
).run(userId);
|
||||
},
|
||||
|
||||
/** Returns true if the user has finished the onboarding flow. */
|
||||
hasCompletedOnboarding(userId: number): boolean {
|
||||
const db = getConnection();
|
||||
const row = db
|
||||
.prepare('SELECT has_completed_onboarding FROM users WHERE id = ?')
|
||||
.get(userId) as { has_completed_onboarding: number } | undefined;
|
||||
return row?.has_completed_onboarding === 1;
|
||||
},
|
||||
};
|
||||
57
server/modules/database/repositories/vapid-keys.ts
Normal file
57
server/modules/database/repositories/vapid-keys.ts
Normal file
@@ -0,0 +1,57 @@
|
||||
/**
|
||||
* VAPID keys repository.
|
||||
*
|
||||
* Stores and retrieves the Web Push VAPID key pair.
|
||||
*/
|
||||
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
|
||||
type VapidKeyRow = {
|
||||
public_key: string;
|
||||
private_key: string;
|
||||
};
|
||||
|
||||
type VapidKeyPair = {
|
||||
publicKey: string;
|
||||
privateKey: string;
|
||||
};
|
||||
|
||||
export const vapidKeysDb = {
|
||||
/** Returns the latest stored VAPID key pair, or null when unset. */
|
||||
getVapidKeys(): VapidKeyPair | null {
|
||||
const db = getConnection();
|
||||
const row = db
|
||||
.prepare(
|
||||
'SELECT public_key, private_key FROM vapid_keys ORDER BY id DESC LIMIT 1'
|
||||
)
|
||||
.get() as Pick<VapidKeyRow, 'public_key' | 'private_key'> | undefined;
|
||||
|
||||
if (!row) return null;
|
||||
return {
|
||||
publicKey: row.public_key,
|
||||
privateKey: row.private_key,
|
||||
};
|
||||
},
|
||||
|
||||
/** Persists a new VAPID key pair. */
|
||||
createVapidKeys(publicKey: string, privateKey: string): void {
|
||||
const db = getConnection();
|
||||
db.prepare(
|
||||
'INSERT INTO vapid_keys (public_key, private_key) VALUES (?, ?)'
|
||||
).run(publicKey, privateKey);
|
||||
},
|
||||
|
||||
/** Replaces all existing keys with a fresh pair. */
|
||||
updateVapidKeys(publicKey: string, privateKey: string): void {
|
||||
const db = getConnection();
|
||||
db.prepare('DELETE FROM vapid_keys').run();
|
||||
vapidKeysDb.createVapidKeys(publicKey, privateKey);
|
||||
},
|
||||
|
||||
/** Deletes all VAPID key rows. */
|
||||
deleteVapidKeys(): void {
|
||||
const db = getConnection();
|
||||
db.prepare('DELETE FROM vapid_keys').run();
|
||||
},
|
||||
};
|
||||
|
||||
151
server/modules/database/schema.ts
Normal file
151
server/modules/database/schema.ts
Normal file
@@ -0,0 +1,151 @@
|
||||
// `users` — local accounts (single-user in practice; schema is multi-user
// capable). Holds auth hash, git identity, and onboarding state.
const USER_TABLE_SCHEMA_SQL = `
CREATE TABLE IF NOT EXISTS users (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  username TEXT UNIQUE NOT NULL,
  password_hash TEXT NOT NULL,
  created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
  last_login DATETIME,
  is_active BOOLEAN DEFAULT 1,
  git_name TEXT,
  git_email TEXT,
  has_completed_onboarding BOOLEAN DEFAULT 0
);
`;

// `api_keys` — per-user API keys; cascade-deleted with the owning user.
export const API_KEYS_TABLE_SCHEMA_SQL = `
CREATE TABLE IF NOT EXISTS api_keys (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  user_id INTEGER NOT NULL,
  key_name TEXT NOT NULL,
  api_key TEXT UNIQUE NOT NULL,
  created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
  last_used DATETIME,
  is_active BOOLEAN DEFAULT 1,
  FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
);
`;

// `user_credentials` — generic secret store (GitHub tokens etc.),
// discriminated by credential_type.
export const USER_CREDENTIALS_TABLE_SCHEMA_SQL = `
CREATE TABLE IF NOT EXISTS user_credentials (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  user_id INTEGER NOT NULL,
  credential_name TEXT NOT NULL,
  credential_type TEXT NOT NULL, -- 'github_token', 'gitlab_token', 'bitbucket_token', etc.
  credential_value TEXT NOT NULL,
  description TEXT,
  created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
  is_active BOOLEAN DEFAULT 1,
  FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
);
`;

// `user_notification_preferences` — one JSON blob of channel/event
// preferences per user (user_id is the primary key).
export const USER_NOTIFICATION_PREFERENCES_TABLE_SCHEMA_SQL = `
CREATE TABLE IF NOT EXISTS user_notification_preferences (
  user_id INTEGER PRIMARY KEY,
  preferences_json TEXT NOT NULL,
  updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
  FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
);
`;

// `vapid_keys` — Web Push VAPID key pairs; repositories read the latest row.
export const VAPID_KEYS_TABLE_SCHEMA_SQL = `
CREATE TABLE IF NOT EXISTS vapid_keys (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  public_key TEXT NOT NULL,
  private_key TEXT NOT NULL,
  created_at DATETIME DEFAULT CURRENT_TIMESTAMP
);
`;

// `push_subscriptions` — browser push endpoints + keys, unique per endpoint.
export const PUSH_SUBSCRIPTIONS_TABLE_SCHEMA_SQL = `
CREATE TABLE IF NOT EXISTS push_subscriptions (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  user_id INTEGER NOT NULL,
  endpoint TEXT NOT NULL UNIQUE,
  keys_p256dh TEXT NOT NULL,
  keys_auth TEXT NOT NULL,
  created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
  FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
);
`;

// `projects` — project paths keyed by UUID, with optional display name and
// starred/archived flags.
export const PROJECTS_TABLE_SCHEMA_SQL = `
CREATE TABLE IF NOT EXISTS projects (
  project_id TEXT PRIMARY KEY NOT NULL,
  project_path TEXT NOT NULL UNIQUE,
  custom_project_name TEXT DEFAULT NULL,
  isStarred BOOLEAN DEFAULT 0,
  isArchived BOOLEAN DEFAULT 0
);
`;

// `sessions` — CLI session metadata, composite-keyed by (session_id,
// provider); project_path nulls out when the project row is deleted.
export const SESSIONS_TABLE_SCHEMA_SQL = `
CREATE TABLE IF NOT EXISTS sessions (
  session_id TEXT NOT NULL,
  provider TEXT NOT NULL DEFAULT 'claude',
  custom_name TEXT,
  project_path TEXT,
  jsonl_path TEXT,
  created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
  updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
  PRIMARY KEY (session_id, provider),
  FOREIGN KEY (project_path) REFERENCES projects(project_path)
    ON DELETE SET NULL
    ON UPDATE CASCADE
);
`;

// `scan_state` — singleton row (id constrained to 1) holding the last-scan
// watermark used by scanStateDb.
export const LAST_SCANNED_AT_SQL = `
CREATE TABLE IF NOT EXISTS scan_state (
  id INTEGER PRIMARY KEY CHECK (id = 1),
  last_scanned_at TIMESTAMP NULL
);
`;

// `app_config` — simple key/value store (e.g. bootstrap values read by the
// connection module).
export const APP_CONFIG_TABLE_SCHEMA_SQL = `
CREATE TABLE IF NOT EXISTS app_config (
  key TEXT PRIMARY KEY,
  value TEXT NOT NULL,
  created_at DATETIME DEFAULT CURRENT_TIMESTAMP
);
`;

// Full bootstrap script: enables FK enforcement, then creates every table
// above plus its lookup indexes. Safe to re-run (all IF NOT EXISTS).
export const INIT_SCHEMA_SQL = `
-- Initialize authentication database
PRAGMA foreign_keys = ON;

${USER_TABLE_SCHEMA_SQL}
-- Indexes for performance for user lookups
CREATE INDEX IF NOT EXISTS idx_users_username ON users(username);
CREATE INDEX IF NOT EXISTS idx_users_active ON users(is_active);

${API_KEYS_TABLE_SCHEMA_SQL}
CREATE INDEX IF NOT EXISTS idx_api_keys_key ON api_keys(api_key);
CREATE INDEX IF NOT EXISTS idx_api_keys_user_id ON api_keys(user_id);
CREATE INDEX IF NOT EXISTS idx_api_keys_active ON api_keys(is_active);

${USER_CREDENTIALS_TABLE_SCHEMA_SQL}
CREATE INDEX IF NOT EXISTS idx_user_credentials_user_id ON user_credentials(user_id);
CREATE INDEX IF NOT EXISTS idx_user_credentials_type ON user_credentials(credential_type);
CREATE INDEX IF NOT EXISTS idx_user_credentials_active ON user_credentials(is_active);

${USER_NOTIFICATION_PREFERENCES_TABLE_SCHEMA_SQL}
CREATE INDEX IF NOT EXISTS idx_user_notification_preferences_user_id ON user_notification_preferences(user_id);

${VAPID_KEYS_TABLE_SCHEMA_SQL}

${PUSH_SUBSCRIPTIONS_TABLE_SCHEMA_SQL}
CREATE INDEX IF NOT EXISTS idx_push_subscriptions_user_id ON push_subscriptions(user_id);

${PROJECTS_TABLE_SCHEMA_SQL}
CREATE INDEX IF NOT EXISTS idx_projects_is_starred ON projects(isStarred);
CREATE INDEX IF NOT EXISTS idx_projects_is_archived ON projects(isArchived);

${SESSIONS_TABLE_SCHEMA_SQL}
CREATE INDEX IF NOT EXISTS idx_session_ids_lookup ON sessions(session_id);
CREATE INDEX IF NOT EXISTS idx_sessions_project_path ON sessions(project_path);

${LAST_SCANNED_AT_SQL}

${APP_CONFIG_TABLE_SCHEMA_SQL}
`;
|
||||
@@ -9,6 +9,7 @@ import type {
|
||||
UpsertProviderMcpServerInput,
|
||||
} from '@/shared/types.js';
|
||||
|
||||
//----------------- PROVIDER CONTRACT INTERFACES ------------
|
||||
/**
|
||||
* Main provider contract for CLI and SDK integrations.
|
||||
*
|
||||
@@ -22,9 +23,13 @@ export interface IProvider {
|
||||
readonly sessions: IProviderSessions;
|
||||
}
|
||||
|
||||
|
||||
// ---------------------------
|
||||
//----------------- PROVIDER AUTH INTERFACE ------------
|
||||
/**
|
||||
* Auth contract for one provider.
|
||||
*
|
||||
* Implementations should return a complete installation/authentication status
|
||||
* without throwing for normal "not installed" or "not authenticated" states.
|
||||
*/
|
||||
export interface IProviderAuth {
|
||||
/**
|
||||
@@ -33,8 +38,13 @@ export interface IProviderAuth {
|
||||
getStatus(): Promise<ProviderAuthStatus>;
|
||||
}
|
||||
|
||||
// ---------------------------
|
||||
//----------------- PROVIDER MCP INTERFACE ------------
|
||||
/**
|
||||
* MCP contract for one provider.
|
||||
*
|
||||
* Implementations must map provider-native MCP config formats to shared
|
||||
* `ProviderMcpServer` records used by routes and frontend state.
|
||||
*/
|
||||
export interface IProviderMcp {
|
||||
listServers(options?: { workspacePath?: string }): Promise<Record<McpScope, ProviderMcpServer[]>>;
|
||||
@@ -45,8 +55,13 @@ export interface IProviderMcp {
|
||||
): Promise<{ removed: boolean; provider: LLMProvider; name: string; scope: McpScope }>;
|
||||
}
|
||||
|
||||
// ---------------------------
|
||||
//----------------- PROVIDER SESSION INTERFACE ------------
|
||||
/**
|
||||
* Session/history contract for one provider.
|
||||
*
|
||||
* Implementations normalize provider-specific events and message history into
|
||||
* shared transport shapes consumed by API routes and realtime streams.
|
||||
*/
|
||||
export interface IProviderSessions {
|
||||
normalizeMessage(raw: unknown, sessionId: string | null): NormalizedMessage[];
|
||||
|
||||
@@ -1,18 +1,38 @@
|
||||
// -------------- HTTP API response shapes for the server, shared across modules --------------
|
||||
|
||||
//----------------- HTTP RESPONSE SHAPES ------------
|
||||
/**
|
||||
* Canonical success envelope used by backend APIs that return a structured payload.
|
||||
*
|
||||
* Use this for route handlers that need a stable `success/data` shape so frontend
|
||||
* consumers can parse responses consistently across endpoints.
|
||||
*/
|
||||
export type ApiSuccessShape<TData = unknown> = {
|
||||
success: true;
|
||||
data: TData;
|
||||
};
|
||||
|
||||
/**
|
||||
* Generic plain-object record used when parsing loosely typed JSON payloads.
|
||||
*
|
||||
* Use this only after runtime shape checks, not as a replacement for validated
|
||||
* domain models.
|
||||
*/
|
||||
export type AnyRecord = Record<string, any>;
|
||||
|
||||
// ---------------------------------------------------------------------------------------------
|
||||
|
||||
// ---------------------------
|
||||
//----------------- PROVIDER MESSAGE MODEL ------------
|
||||
/**
|
||||
* Providers supported by the unified server runtime.
|
||||
*
|
||||
* Use this as the source of truth whenever a function or payload needs to identify
|
||||
* a specific LLM integration.
|
||||
*/
|
||||
export type LLMProvider = 'claude' | 'codex' | 'gemini' | 'cursor';
|
||||
|
||||
// ---------------------------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Message/event variants emitted by provider adapters and normalized transports.
|
||||
*
|
||||
* Keep this union in sync with event kinds produced by provider session adapters.
|
||||
*/
|
||||
export type MessageKind =
|
||||
| 'text'
|
||||
| 'tool_use'
|
||||
@@ -30,11 +50,10 @@ export type MessageKind =
|
||||
| 'task_notification';
|
||||
|
||||
/**
|
||||
* Provider-neutral message event emitted over REST and realtime transports.
|
||||
* Provider-neutral message envelope used in REST responses and realtime channels.
|
||||
*
|
||||
* Providers all produce their own native SDK/CLI event shapes, so this type keeps
|
||||
* the common envelope strict while allowing provider-specific details to ride
|
||||
* along as optional properties.
|
||||
* Every provider-specific message must be converted into this shape before being
|
||||
* emitted outside provider-specific modules.
|
||||
*/
|
||||
export type NormalizedMessage = {
|
||||
id: string;
|
||||
@@ -73,21 +92,22 @@ export type NormalizedMessage = {
|
||||
};
|
||||
|
||||
/**
|
||||
* Pagination and provider lookup options for reading persisted session history.
|
||||
* Shared options used to fetch historical provider messages.
|
||||
*
|
||||
* Consumers should pass provider-specific lookup hints (`projectName`, `projectPath`)
|
||||
* only when the selected provider requires them.
|
||||
*/
|
||||
export type FetchHistoryOptions = {
|
||||
/** Claude project folder name. Required by Claude history lookup. */
|
||||
projectName?: string;
|
||||
/** Absolute workspace path. Required by Cursor to compute its chat hash. */
|
||||
projectPath?: string;
|
||||
/** Page size. `null` means all messages. */
|
||||
limit?: number | null;
|
||||
/** Pagination offset from the newest messages. */
|
||||
offset?: number;
|
||||
};
|
||||
|
||||
/**
|
||||
* Provider-neutral history result returned by the unified messages endpoint.
|
||||
* Standardized response payload returned from provider history readers.
|
||||
*
|
||||
* Use this as the contract for APIs that return paginated conversation history.
|
||||
*/
|
||||
export type FetchHistoryResult = {
|
||||
messages: NormalizedMessage[];
|
||||
@@ -98,21 +118,40 @@ export type FetchHistoryResult = {
|
||||
tokenUsage?: unknown;
|
||||
};
|
||||
|
||||
// ---------------------------------------------------------------------------------------------
|
||||
|
||||
// ---------------------------
|
||||
//----------------- SHARED ERROR TYPES ------------
|
||||
/**
|
||||
* Optional metadata used when constructing application-level errors.
|
||||
*
|
||||
* `statusCode` should reflect the HTTP response status, while `code` identifies
|
||||
* the stable machine-readable error category.
|
||||
*/
|
||||
export type AppErrorOptions = {
|
||||
code?: string;
|
||||
statusCode?: number;
|
||||
details?: unknown;
|
||||
};
|
||||
|
||||
// -------------------- MCP related shared types --------------------
|
||||
// ---------------------------
|
||||
//----------------- MCP TYPES ------------
|
||||
/**
|
||||
* Scope where an MCP server definition is stored and resolved.
|
||||
*
|
||||
* `user` is global for a user account, `local` is provider-local, and `project`
|
||||
* is tied to a specific project path.
|
||||
*/
|
||||
export type McpScope = 'user' | 'local' | 'project';
|
||||
|
||||
/**
|
||||
* Transport protocol used by an MCP server definition.
|
||||
*/
|
||||
export type McpTransport = 'stdio' | 'http' | 'sse';
|
||||
|
||||
/**
|
||||
* Provider MCP server descriptor normalized for frontend consumption.
|
||||
* Normalized MCP server model exposed to frontend and route handlers.
|
||||
*
|
||||
* Provider adapters should map provider-native config to this structure before
|
||||
* returning results.
|
||||
*/
|
||||
export type ProviderMcpServer = {
|
||||
provider: LLMProvider;
|
||||
@@ -131,7 +170,10 @@ export type ProviderMcpServer = {
|
||||
};
|
||||
|
||||
/**
|
||||
* Shared payload shape for MCP server create/update operations.
|
||||
* Payload for create/update MCP server operations.
|
||||
*
|
||||
* Routes and services should accept this type, validate it, and then persist it
|
||||
* through provider-specific MCP repositories.
|
||||
*/
|
||||
export type UpsertProviderMcpServerInput = {
|
||||
name: string;
|
||||
@@ -149,18 +191,13 @@ export type UpsertProviderMcpServerInput = {
|
||||
envHttpHeaders?: Record<string, string>;
|
||||
};
|
||||
|
||||
// ---------------------------------------------------------------------------------------------
|
||||
|
||||
// -------------------- Provider auth status types --------------------
|
||||
// ---------------------------
|
||||
//----------------- PROVIDER AUTH TYPES ------------
|
||||
/**
|
||||
* Result of a provider status check (installation + authentication).
|
||||
* Authentication status result returned by provider health checks.
|
||||
*
|
||||
* installed - Whether the provider's CLI/SDK is available
|
||||
* provider - Provider id the status belongs to
|
||||
* authenticated - Whether valid credentials exist
|
||||
* email - User email or auth method identifier
|
||||
* method - Auth method (e.g. 'api_key', 'credentials_file')
|
||||
* [error] - Error message if not installed or not authenticated
|
||||
* This shape is consumed by settings/status endpoints to report installation and
|
||||
* credential state for each provider.
|
||||
*/
|
||||
export type ProviderAuthStatus = {
|
||||
installed: boolean;
|
||||
@@ -170,3 +207,32 @@ export type ProviderAuthStatus = {
|
||||
method: string | null;
|
||||
error?: string;
|
||||
};
|
||||
|
||||
// ---------------------------
|
||||
//----------------- SHARED DATABASE CREDENTIAL TYPES ------------
|
||||
/**
|
||||
* Safe credential view returned by credential listing APIs.
|
||||
*
|
||||
* This intentionally excludes the raw credential secret while still exposing
|
||||
* metadata needed for UI rendering and management operations.
|
||||
*/
|
||||
export type CredentialPublicRow = {
|
||||
id: number;
|
||||
credential_name: string;
|
||||
credential_type: string;
|
||||
description: string | null;
|
||||
created_at: string;
|
||||
is_active: number;
|
||||
};
|
||||
|
||||
/**
|
||||
* Result returned after creating a credential record.
|
||||
*
|
||||
* Use this return shape when callers need the created id and display metadata,
|
||||
* but must never receive the stored secret value.
|
||||
*/
|
||||
export type CreateCredentialResult = {
|
||||
id: number | bigint;
|
||||
credentialName: string;
|
||||
credentialType: string;
|
||||
};
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
|
||||
import { randomUUID } from 'node:crypto';
|
||||
import { mkdir, readFile, writeFile } from 'node:fs/promises';
|
||||
import path from 'node:path';
|
||||
@@ -12,6 +11,14 @@ import type {
|
||||
NormalizedMessage,
|
||||
} from '@/shared/types.js';
|
||||
|
||||
//----------------- NORMALIZED MESSAGE HELPER INPUT TYPES ------------
|
||||
/**
|
||||
* Input payload accepted by `createNormalizedMessage`.
|
||||
*
|
||||
* Callers provide provider-specific fields plus the required `kind/provider`
|
||||
* pair; this helper fills missing envelope fields (`id`, `sessionId`,
|
||||
* `timestamp`) in a consistent way.
|
||||
*/
|
||||
type NormalizedMessageInput =
|
||||
{
|
||||
kind: NormalizedMessage['kind'];
|
||||
@@ -21,6 +28,14 @@ type NormalizedMessageInput =
|
||||
timestamp?: string | null;
|
||||
} & Record<string, unknown>;
|
||||
|
||||
// ---------------------------
|
||||
//----------------- HTTP HANDLER UTILITIES ------------
|
||||
/**
|
||||
* Wraps arbitrary data in the standard API success envelope.
|
||||
*
|
||||
* Use this helper in route handlers to keep successful JSON responses consistent
|
||||
* across endpoints.
|
||||
*/
|
||||
export function createApiSuccessResponse<TData>(
|
||||
data: TData,
|
||||
): ApiSuccessShape<TData> {
|
||||
@@ -30,6 +45,12 @@ export function createApiSuccessResponse<TData>(
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts an async Express handler into a standard `RequestHandler` and routes
|
||||
* rejected promises to Express error middleware.
|
||||
*
|
||||
* Use this to avoid repeating `try/catch(next)` in every async route.
|
||||
*/
|
||||
export function asyncHandler(
|
||||
handler: (req: Request, res: Response, next: NextFunction) => Promise<unknown>
|
||||
): RequestHandler {
|
||||
@@ -38,7 +59,14 @@ export function asyncHandler(
|
||||
};
|
||||
}
|
||||
|
||||
// --------- Global app error class for consistent error handling across the server ---------
|
||||
// ---------------------------
|
||||
//----------------- SHARED ERROR UTILITIES ------------
|
||||
/**
|
||||
* Shared application error with HTTP status and machine-readable code metadata.
|
||||
*
|
||||
* Throw this from service/route layers when the caller should receive a
|
||||
* controlled error response rather than a generic 500.
|
||||
*/
|
||||
export class AppError extends Error {
|
||||
readonly code: string;
|
||||
readonly statusCode: number;
|
||||
@@ -53,9 +81,8 @@ export class AppError extends Error {
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------------------------------
|
||||
|
||||
// ------------------------ Normalized provider message helpers ------------------------
|
||||
// ---------------------------
|
||||
//----------------- NORMALIZED PROVIDER MESSAGE UTILITIES ------------
|
||||
/**
|
||||
* Generates a stable unique id for normalized provider messages.
|
||||
*/
|
||||
@@ -80,9 +107,8 @@ export function createNormalizedMessage(fields: NormalizedMessageInput): Normali
|
||||
};
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------------------------------
|
||||
|
||||
// ------------------------ The following are mainly for provider MCP runtimes ------------------------
|
||||
// ---------------------------
|
||||
//----------------- MCP CONFIG PARSING UTILITIES ------------
|
||||
/**
|
||||
* Safely narrows an unknown value to a plain object record.
|
||||
*
|
||||
@@ -189,5 +215,3 @@ export const writeJsonConfig = async (filePath: string, data: Record<string, unk
|
||||
await writeFile(filePath, `${JSON.stringify(data, null, 2)}\n`, 'utf8');
|
||||
};
|
||||
|
||||
// -------------------------------------------------------------------------------------------
|
||||
|
||||
|
||||
Reference in New Issue
Block a user