feat: implement basic file watcher and session updater

This commit is contained in:
Haileyesus
2026-03-25 10:46:08 +03:00
parent f187e22976
commit 3e268e201a
18 changed files with 1448 additions and 377 deletions

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,165 @@
import chokidar from "chokidar";
import path from "path";
import os from "os";
import { promises as fsPromises } from "fs";
import { logger } from "@/shared/utils/logger.js";
import {
processClaudeSessionFile,
processCodexSessionFile,
processGeminiSessionFile,
processCursorSessionFile,
getSessions
} from "@/modules/workspace/get-workspaces/get-workspaces.js";
import { sessionsDb } from "@/shared/database/repositories/sessions.db.js";
import { LLMProvider } from "@/shared/types/app.js";
let projectsWatchers = [];
// File system watchers for provider project/session folders.
// Each entry maps an LLM provider to the root folder where its CLI stores
// session transcripts; initializeWatcher recursively monitors these roots.
const PROVIDER_WATCH_PATHS: { provider: LLMProvider; rootPath: string }[] = [
  {
    provider: "claude",
    rootPath: path.join(os.homedir(), ".claude", "projects"),
  },
  {
    provider: "cursor",
    rootPath: path.join(os.homedir(), ".cursor", "chats")
  },
  {
    provider: "codex",
    rootPath: path.join(os.homedir(), ".codex", "sessions"),
  },
  {
    provider: "gemini",
    rootPath: path.join(os.homedir(), ".gemini", "sessions"),
  },
];
// Glob patterns excluded from watching to cut event noise and polling cost.
const WATCHER_IGNORED_PATTERNS = [
  "**/node_modules/**",
  "**/.git/**",
  "**/dist/**",
  "**/build/**",
  "**/*.tmp",
  "**/*.swp",
  "**/.DS_Store",
];
// Chokidar event names this module can receive (only "add"/"change" are handled below).
type EventType = "add" | "change" | "unlink" | "addDir" | "unlinkDir";
const onUpdate = async (
eventType: EventType,
filePath: string,
provider: LLMProvider,
) => {
try {
console.log("[eventType] detected: ", eventType, " filePath: ", filePath, " provider: ", provider);
switch (eventType) {
case "add":
case "change": {
let sessionId: string | null = null;
let workspacePath: string | null = null;
let sessionName = `Untitled ${provider} Session`;
switch (provider) {
case "claude": {
const result = await processClaudeSessionFile(filePath);
if (result) {
sessionId = result.sessionId;
workspacePath = result.workspacePath;
sessionName = result.sessionName || sessionName;
}
break;
}
case "codex": {
const result = await processCodexSessionFile(filePath);
if (result) {
sessionId = result.sessionId;
workspacePath = result.workspacePath;
sessionName = result.sessionName || sessionName;
}
break;
}
case "gemini": {
const result = await processGeminiSessionFile(filePath);
if (result) {
sessionId = result.sessionId;
workspacePath = result.workspacePath;
sessionName = result.sessionName || sessionName;
}
break;
}
case "cursor": {
const result = await processCursorSessionFile(filePath);
if (result) {
sessionId = result.sessionId;
workspacePath = result.workspacePath;
sessionName = result.sessionName || sessionName;
}
break;
}
}
if (sessionId && workspacePath) {
sessionsDb.createSession(sessionId, provider, workspacePath, sessionName);
}
break;
}
}
} catch (error: any) {
logger.error(
`[ERROR] Failed to handle ${provider} file change for ${filePath}:`,
error,
);
}
};
// Setup file system watchers for Claude, Cursor, Codex, and Gemini project/session folders.
export async function initializeWatcher() {
  logger.info("Setting up project watchers for providers...");
  // Run one full scan first so sessions created while the app was offline are picked up.
  await getSessions();
  for (const { provider, rootPath } of PROVIDER_WATCH_PATHS) {
    try {
      // chokidar v4 emits ENOENT via the "error" event for missing roots and will not auto-recover.
      // Ensure provider folders exist before creating the watcher so watching stays active.
      await fsPromises.mkdir(rootPath, { recursive: true });
      logger.info(`Setting up watcher for ${provider} at: ${rootPath}`);
      const watcher = chokidar.watch(rootPath, {
        ignored: WATCHER_IGNORED_PATTERNS,
        persistent: true,
        ignoreInitial: true, // Don't fire events for existing files on startup
        followSymlinks: false,
        depth: 6, // Reasonable depth limit
        // Polling avoids Windows fs.watch buffering/batching issues: chokidar
        // stops relying on native FS events and stats files on an interval.
        usePolling: true,
        interval: 2000, // Poll every 2000ms
        // Polling large binary files is CPU-intensive, so check them less often.
        binaryInterval: 6000,
        // awaitWriteFinish intentionally omitted so events fire promptly while
        // an LLM is still streaming output into the file.
      });
      // Set up event listeners (the previous no-op "ready" handler was removed).
      watcher
        .on("add", (filePath) => onUpdate("add", filePath, provider))
        .on("change", (filePath) => onUpdate("change", filePath, provider))
        .on("error", (error) => {
          logger.error(`[ERROR] ${provider} watcher error: ${(error as Error).message}`);
        });
      projectsWatchers.push(watcher);
    } catch (error) {
      logger.error(
        `[ERROR] Failed to setup ${provider} watcher for ${rootPath}:`,
        error,
      );
    }
  }
}

View File

@@ -0,0 +1,324 @@
import os from 'os';
import path from 'path';
import fs from 'node:fs';
import fsp from 'node:fs/promises';
import readline from 'readline';
import { sessionsDb } from '@/shared/database/repositories/sessions.db.js';
import crypto from 'node:crypto';
import { scanStateDb } from '@/shared/database/repositories/scan-state.db.js';
// ============================================================================
// 1. SHARED TYPES & UTILITIES
// ============================================================================
// By extracting file traversal and JSONL parsing, we remove 80% of the duplication.
// Minimal session info extracted from a provider's on-disk session file.
type SessionData = {
  sessionId: string;      // Provider-assigned session identifier
  workspacePath: string;  // Workspace/project path the session belongs to
  sessionName?: string;   // Human-readable name; callers fall back to a default
}
/**
 * Reads a JSONL file and builds a Map of Key -> Value.
 * Useful for index files like history.jsonl or session_index.jsonl.
 *
 * Malformed lines are skipped individually so a single corrupt or
 * partially-flushed entry does not discard the rest of the index (previously
 * one bad line aborted the whole map via the outer catch).
 */
export async function buildLookupMap(filePath: string, keyField: string, valueField: string): Promise<Map<string, string>> {
  const lookup = new Map<string, string>();
  try {
    const fileStream = fs.createReadStream(filePath);
    const rl = readline.createInterface({ input: fileStream, crlfDelay: Infinity });
    for await (const line of rl) {
      if (!line.trim()) continue;
      let data: any;
      try {
        data = JSON.parse(line);
      } catch {
        continue; // Skip malformed JSON lines instead of aborting the whole map
      }
      // We use the first occurrence. In history files, this is usually the start of the thread.
      if (data[keyField] && data[valueField] && !lookup.has(data[keyField])) {
        lookup.set(data[keyField], data[valueField]);
      }
    }
  } catch (e) { /* File might not exist yet */ }
  return lookup;
}
/**
 * Recursively walks a directory tree and returns a flat array of all files
 * matching a specific extension (e.g., '.jsonl' or '.json').
 *
 * Only files created (birthtime) after the last recorded scan are returned;
 * when no scan has been recorded yet, every matching file is included.
 *
 * @param lastScanDate - Cached last-scan timestamp. Resolved once at the
 *   top-level call and threaded through the recursion, instead of querying
 *   the database once per matching file as before. `undefined` means
 *   "not looked up yet"; `null` means "looked up, no previous scan".
 */
async function findFilesRecursivelyCreatedAfterLastScan(
  dirPath: string,
  extension: string,
  fileList: string[] = [],
  lastScanDate: Date | null | undefined = undefined
): Promise<string[]> {
  const cutoff = lastScanDate === undefined ? scanStateDb.getLastScannedAt() : lastScanDate;
  try {
    const entries = await fsp.readdir(dirPath, { withFileTypes: true });
    for (const entry of entries) {
      const fullPath = path.join(dirPath, entry.name);
      if (entry.isDirectory()) {
        await findFilesRecursivelyCreatedAfterLastScan(fullPath, extension, fileList, cutoff);
      } else if (entry.isFile() && entry.name.endsWith(extension)) {
        if (cutoff) {
          // Check file CREATION time (birthtime) against our last scan time.
          const stats = await fsp.stat(fullPath);
          if (stats.birthtime > cutoff) {
            fileList.push(fullPath);
          }
        } else {
          fileList.push(fullPath);
        }
      }
    }
  } catch (e) {
    // Fail silently for directories that don't exist or lack read permissions
  }
  return fileList;
}
/**
 * Reads a file line-by-line, parsing each line as JSON.
 * It passes the parsed JSON to a custom `extractorFn`. As soon as the extractor
 * successfully finds both a sessionId and workspacePath, it closes the file and returns.
 *
 * Malformed lines (e.g. a line an LLM is still streaming to disk — the watcher
 * deliberately fires mid-write) are skipped instead of aborting the whole scan.
 */
export async function extractFirstValidJsonlData(
  filePath: string,
  extractorFn: (parsedJson: any) => Partial<SessionData> | null | undefined
): Promise<SessionData | null> {
  try {
    const fileStream = fs.createReadStream(filePath);
    const rl = readline.createInterface({ input: fileStream, crlfDelay: Infinity });
    for await (const line of rl) {
      if (!line.trim()) continue;
      let parsedData: any;
      try {
        parsedData = JSON.parse(line);
      } catch {
        continue; // Skip unparseable/partial lines; later lines may still match
      }
      const extracted = extractorFn(parsedData);
      // If our custom extractor found what we need, return early
      if (extracted?.sessionId && extracted?.workspacePath) {
        rl.close();
        fileStream.close();
        return extracted as SessionData;
      }
    }
  } catch (e) {
    // Ignored errors (unreadable file, stream failure)
  }
  return null;
}
// ============================================================================
// 2. JSONL-BASED PROVIDERS (Claude & Codex)
// ============================================================================
// Now, these functions only need to define WHERE to look, and HOW to map the JSON.
// ----- Claude -----
/** Parses one Claude session transcript, resolving its display name from history.jsonl. */
export async function processClaudeSessionFile(file: string, nameMap?: Map<string, string>): Promise<SessionData | null> {
  // Lazily build the sessionId -> display-name index when none was supplied.
  const names = nameMap ?? await buildLookupMap(
    path.join(os.homedir(), '.claude', 'history.jsonl'),
    'sessionId',
    'display'
  );
  // Claude stores cwd and sessionId directly on the root object of each line.
  return extractFirstValidJsonlData(file, (data) => ({
    workspacePath: data?.cwd,
    sessionId: data?.sessionId,
    sessionName: names.get(data?.sessionId) || 'Untitled Claude Session'
  }));
}
/** Scans ~/.claude/projects for new session files and upserts them into the DB. */
async function getClaudeSessions() {
  const claudeHome = path.join(os.homedir(), '.claude');
  // Pre-load display names from the history index.
  const displayNames = await buildLookupMap(path.join(claudeHome, 'history.jsonl'), 'sessionId', 'display');
  const sessionFiles = await findFilesRecursivelyCreatedAfterLastScan(path.join(claudeHome, 'projects'), '.jsonl');
  for (const sessionFile of sessionFiles) {
    const parsed = await processClaudeSessionFile(sessionFile, displayNames);
    if (!parsed) continue;
    sessionsDb.createSession(parsed.sessionId, 'claude', parsed.workspacePath, parsed.sessionName);
  }
}
// ----- Codex -----
/** Parses one Codex session transcript, resolving its name from session_index.jsonl. */
export async function processCodexSessionFile(file: string, nameMap?: Map<string, string>): Promise<SessionData | null> {
  // Lazily load the session-index lookup when the caller did not supply one.
  const names = nameMap ?? await buildLookupMap(
    path.join(os.homedir(), '.codex', 'session_index.jsonl'),
    'id',
    'thread_name'
  );
  // Codex nests the relevant fields inside a `payload` object on each line.
  return extractFirstValidJsonlData(file, (data) => ({
    workspacePath: data?.payload?.cwd,
    sessionId: data?.payload?.id,
    sessionName: names.get(data?.payload?.id) || 'Untitled Codex Session'
  }));
}
/** Scans ~/.codex/sessions for new session files and upserts them into the DB. */
async function getCodexSessions() {
  const codexHome = path.join(os.homedir(), '.codex');
  // Session names come from the index file's thread_name attribute.
  const threadNames = await buildLookupMap(path.join(codexHome, 'session_index.jsonl'), 'id', 'thread_name');
  const sessionFiles = await findFilesRecursivelyCreatedAfterLastScan(path.join(codexHome, 'sessions'), '.jsonl');
  for (const sessionFile of sessionFiles) {
    const parsed = await processCodexSessionFile(sessionFile, threadNames);
    if (!parsed) continue;
    sessionsDb.createSession(parsed.sessionId, 'codex', parsed.workspacePath, parsed.sessionName);
  }
}
// ============================================================================
// 3. STANDARD JSON PROVIDERS (Gemini)
// ============================================================================
// ----- Gemini -----
/** Parses one Gemini session document; returns null for unreadable/incomplete files. */
export async function processGeminiSessionFile(file: string): Promise<SessionData | null> {
  try {
    // Gemini sessions are plain JSON documents (not JSONL), so parse the file wholesale.
    const parsed = JSON.parse(await fsp.readFile(file, 'utf8'));
    if (!parsed?.id || !parsed?.projectPath) {
      return null;
    }
    return {
      sessionId: parsed.id,
      workspacePath: parsed.projectPath,
      sessionName: parsed.messages?.[0]?.content || 'New Gemini Chat'
    };
  } catch (e) {
    // Unreadable or malformed JSON — treat as "no session".
    return null;
  }
}
/** Scans ~/.gemini/sessions for new session documents and upserts them into the DB. */
async function getGeminiSessions() {
  // Gemini keeps one standard-JSON file per session under this root.
  const sessionsRoot = path.join(os.homedir(), '.gemini', 'sessions');
  const sessionFiles = await findFilesRecursivelyCreatedAfterLastScan(sessionsRoot, '.json');
  for (const sessionFile of sessionFiles) {
    const parsed = await processGeminiSessionFile(sessionFile);
    if (!parsed) continue;
    sessionsDb.createSession(parsed.sessionId, 'gemini', parsed.workspacePath, parsed.sessionName);
  }
}
// ============================================================================
// 4. COMPLEX CUSTOM PROVIDERS (Cursor)
// ============================================================================
// ----- Cursor -----
/** Hex-encoded MD5 digest of the input (Cursor derives chat folder names from this). */
function md5(input: string): string {
  const hasher = crypto.createHash('md5');
  hasher.update(input);
  return hasher.digest('hex');
}
/**
 * Scans a Cursor worker.log line by line and returns the value of the first
 * `workspacePath=` marker, or null when the file is missing or has no marker.
 */
export async function extractWorkspacePathFromWorkerLog(filePath: string): Promise<string | null> {
  try {
    const stream = fs.createReadStream(filePath, { encoding: 'utf8' });
    const reader = readline.createInterface({
      input: stream,
      crlfDelay: Infinity
    });
    for await (const logLine of reader) {
      const found = /workspacePath=(.*)$/.exec(logLine)?.[1];
      if (found) {
        // Stop reading as soon as the marker is located.
        reader.close();
        stream.close();
        return found;
      }
    }
  } catch {
    // Missing or unreadable log — treated the same as "no workspace path found".
  }
  return null;
}
/**
 * Parses one Cursor chat transcript. The session id is the file name; the
 * workspace comes from the worker.log two directory levels above the file.
 */
export async function processCursorSessionFile(file: string): Promise<SessionData | null> {
  const sessionId = path.basename(file, '.jsonl');
  const workerLogPath = path.join(path.dirname(path.dirname(file)), 'worker.log');
  const workspacePath = await extractWorkspacePathFromWorkerLog(workerLogPath);
  if (!workspacePath) return null;
  return extractFirstValidJsonlData(file, (lineJson) => {
    if (lineJson.role !== 'user') return null;
    const rawText = lineJson.message?.content?.[0]?.text || '';
    // Strip <user_query> tags, trim, and use the first line as the session name.
    const firstLine = rawText.replace(/<\/?user_query>/g, '').trim().split('\n')[0];
    return { sessionId, workspacePath, sessionName: firstLine || "Untitled Cursor Session" };
  });
}
/**
 * Discovers Cursor chats: each ~/.cursor/projects/<name>/worker.log names a
 * workspace, and that workspace's chats live under ~/.cursor/chats/<md5(path)>.
 */
async function getCursorSessions() {
  try {
    const cursorHome = path.join(os.homedir(), '.cursor');
    const projectsRoot = path.join(cursorHome, 'projects');
    const visitedWorkspaces = new Set<string>();
    for (const projectName of await fsp.readdir(projectsRoot)) {
      const workspacePath = await extractWorkspacePathFromWorkerLog(
        path.join(projectsRoot, projectName, 'worker.log'),
      );
      // Skip projects without a resolvable workspace and de-duplicate repeats.
      if (!workspacePath || visitedWorkspaces.has(workspacePath)) continue;
      visitedWorkspaces.add(workspacePath);
      const chatsDir = path.join(cursorHome, 'chats', md5(workspacePath));
      for (const sessionFile of await findFilesRecursivelyCreatedAfterLastScan(chatsDir, '.jsonl')) {
        const parsed = await processCursorSessionFile(sessionFile);
        if (!parsed) continue;
        sessionsDb.createSession(parsed.sessionId, 'cursor', parsed.workspacePath, parsed.sessionName);
      }
    }
  } catch (e) {
    // Base cursor directory or projects directory likely doesn't exist
  }
}
/**
 * Scans every provider's session store, upserts discovered sessions into the
 * database, and records the scan timestamp so subsequent scans only process
 * newly-created files.
 */
export async function getSessions() {
  console.time("🚀 Workspace sync total time");
  console.log("Starting workspace sync...");
  try {
    // Run the provider scans concurrently to speed up boot time.
    const results = await Promise.allSettled([
      getClaudeSessions(),
      getCodexSessions(),
      getGeminiSessions(),
      getCursorSessions()
    ]);
    // Promise.allSettled never rejects, so provider failures must be surfaced
    // explicitly — otherwise they would be silently swallowed and the catch
    // below would never see them.
    const providers = ["claude", "codex", "gemini", "cursor"];
    results.forEach((result, index) => {
      if (result.status === "rejected") {
        console.error(`An error occurred during ${providers[index]} sync:`, result.reason);
      }
    });
    scanStateDb.updateLastScannedAt();
  } catch (error) {
    // Reachable only if recording the scan time (or scan setup) throws.
    console.error("An error occurred during sync:", error);
  } finally {
    console.log("----------------------------------");
    // Stop the timer using the exact same label.
    // This will print: 🚀 Workspace sync total time: 123.456ms
    console.timeEnd("🚀 Workspace sync total time");
    console.log("Workspace synchronization complete.");
  }
}

View File

View File

View File

View File

@@ -1,6 +1,37 @@
import express from 'express';
import http from 'http';
import { userDb } from "@/shared/database/repositories/users.js";
import { initializeDatabase } from '@/shared/database/init-db.js';
import { initializeWatcher } from '@/modules/watcher/file-watcher.js';
console.log("----------------Hello there, Refactored Runner!-------------------");
console.log("User db initialized");
console.log("First user:", userDb.getFirstUser());
const app = express();
const server = http.createServer(app);
const serverPortEnv = process.env.SERVER_PORT;
// Parse with an explicit radix so values like "0x50" or legacy octal-looking
// strings cannot be misinterpreted.
const SERVER_PORT = serverPortEnv ? Number.parseInt(serverPortEnv, 10) : 3001;
if (Number.isNaN(SERVER_PORT)) {
  throw new Error(`Invalid SERVER_PORT value: ${serverPortEnv}`);
}
const HOST = process.env.HOST || '0.0.0.0';
/** Initializes the database, then starts the HTTP server and file watchers. */
async function main() {
  try {
    await initializeDatabase();
    server.listen(SERVER_PORT, HOST, async () => {
      console.log(`Server is running on http://${HOST}:${SERVER_PORT}`);
      // Watchers start only after the server is accepting connections.
      await initializeWatcher();
    });
  } catch (error) {
    console.error("Failed to initialize database:", error);
    process.exit(1);
  }
}
await main();

View File

@@ -9,7 +9,7 @@ export const initializeDatabase = async () => {
const db = getConnection();
db.exec(INIT_SCHEMA_SQL);
logger.info('Database schema applied');
runMigrations(db);
runMigrations(db); // ? If we rename the database to something new, would a migration be still necessary?
} catch (err) {
const message = err instanceof Error ? err.message : String(err);
logger.error('Database initialization failed', { error: message });

View File

@@ -1,5 +1,5 @@
import { Database } from "better-sqlite3";
import { APP_CONFIG_TABLE_SCHEMA_SQL, SESSION_NAMES_TABLE_SCHEMA_SQL } from "@/shared/database/schema.js";
import { APP_CONFIG_TABLE_SCHEMA_SQL, LAST_SCANNED_AT_SQL, SESSIONS_TABLE_SCHEMA_SQL, WORK_SPACE_PATH_SQL } from "@/shared/database/schema.js";
import { logger } from "@/shared/utils/logger.js";
const addColumnToUsersTableIfNotExists = (
@@ -29,12 +29,16 @@ export const runMigrations = (db: Database) => {
// Create app_config table if it doesn't exist (for existing installations)
db.exec(APP_CONFIG_TABLE_SCHEMA_SQL);
// Create session_names table if it doesn't exist (for existing installations)
db.exec(SESSION_NAMES_TABLE_SCHEMA_SQL);
// Create sessions table if it doesn't exist (for existing installations)
db.exec(SESSIONS_TABLE_SCHEMA_SQL);
db.exec(
"CREATE INDEX IF NOT EXISTS idx_session_names_lookup ON session_names(session_id, provider)",
"CREATE INDEX IF NOT EXISTS idx_session_ids_lookup ON sessions(session_id)"
);
db.exec(WORK_SPACE_PATH_SQL);
db.exec(LAST_SCANNED_AT_SQL);
logger.info("Database migrations completed successfully");
} catch (error: any) {

View File

@@ -0,0 +1,38 @@
import { getConnection } from "@/shared/database/connection.js";
import { ScanStateRow } from "@/shared/database/types.js";
export const scanStateDb = {
  /** Returns the timestamp of the last completed scan, or null before any scan. */
  getLastScannedAt(): Date | null {
    const db = getConnection();
    // The cast must include `undefined`: before any scan the singleton row is
    // absent and better-sqlite3's .get() returns undefined (the old cast to a
    // bare ScanStateRow lied to the type checker).
    const row = db
      .prepare(`SELECT last_scanned_at FROM scan_state WHERE id = 1`)
      .get() as ScanStateRow | undefined;
    if (!row?.last_scanned_at) {
      return null; // No row yet, or a row with a NULL timestamp.
    }
    // SQLite CURRENT_TIMESTAMP returns UTC in "YYYY-MM-DD HH:MM:SS" format.
    // Replace space with 'T' and append 'Z' to parse reliably in JS across all platforms.
    return new Date(row.last_scanned_at.replace(' ', 'T') + 'Z');
  },
  /** Upserts the singleton scan-state row (id = 1) with the current UTC time. */
  updateLastScannedAt(): void {
    const db = getConnection();
    db.prepare(`
      INSERT INTO scan_state (id, last_scanned_at)
      VALUES (1, CURRENT_TIMESTAMP)
      ON CONFLICT (id)
      DO UPDATE SET last_scanned_at = CURRENT_TIMESTAMP
    `).run();
  }
};

View File

@@ -1,109 +0,0 @@
/**
* Session names repository.
*
* Manages custom display names for provider sessions. When a user
* renames a chat session in the UI, the override is stored here
* and applied on top of the CLI-generated summary.
*/
import { getConnection } from '@/shared/database/connection.js';
import type {
SessionNameLookupRow,
SessionWithSummary,
} from '@/shared/database/types.js';
import { logger } from '@/shared/utils/logger.js';
// ---------------------------------------------------------------------------
// Queries
// ---------------------------------------------------------------------------
export const sessionNamesDb = {
/** Inserts or updates a custom session name (upsert on session_id + provider). */
setName(sessionId: string, provider: string, customName: string): void {
const db = getConnection();
db.prepare(
`INSERT INTO session_names (session_id, provider, custom_name)
VALUES (?, ?, ?)
ON CONFLICT(session_id, provider)
DO UPDATE SET custom_name = excluded.custom_name,
updated_at = CURRENT_TIMESTAMP`
).run(sessionId, provider, customName);
},
/** Returns the custom name for a single session, or null if unset. */
getName(sessionId: string, provider: string): string | null {
const db = getConnection();
const row = db
.prepare(
'SELECT custom_name FROM session_names WHERE session_id = ? AND provider = ?'
)
.get(sessionId, provider) as { custom_name: string } | undefined;
return row?.custom_name ?? null;
},
/**
* Batch lookup for multiple session IDs.
* Returns a Map<sessionId, customName> for efficient overlay onto session lists.
*/
getNames(sessionIds: string[], provider: string): Map<string, string> {
if (sessionIds.length === 0) return new Map();
const db = getConnection();
const placeholders = sessionIds.map(() => '?').join(',');
const rows = db
.prepare(
`SELECT session_id, custom_name FROM session_names
WHERE session_id IN (${placeholders}) AND provider = ?`
)
.all(...sessionIds, provider) as SessionNameLookupRow[];
return new Map(rows.map((r) => [r.session_id, r.custom_name]));
},
/** Removes a custom session name. Returns true if a row was deleted. */
deleteName(sessionId: string, provider: string): boolean {
const db = getConnection();
return (
db
.prepare(
'DELETE FROM session_names WHERE session_id = ? AND provider = ?'
)
.run(sessionId, provider).changes > 0
);
},
};
// ---------------------------------------------------------------------------
// Session overlay helper
// ---------------------------------------------------------------------------
/**
* Overlays custom session names from the database onto a list of sessions.
* Mutates each session's `summary` field in-place when a custom name exists.
*
* This is the typed equivalent of the legacy `applyCustomSessionNames` function.
* Non-fatal: logs a warning on failure instead of throwing.
*/
export function applyCustomSessionNames(
sessions: SessionWithSummary[] | undefined | null,
provider: string
): void {
if (!sessions?.length) return;
try {
const ids = sessions.map((s) => s.id);
const customNames = sessionNamesDb.getNames(ids, provider);
for (const session of sessions) {
const custom = customNames.get(session.id);
if (custom) {
session.summary = custom;
}
}
} catch (err) {
const message = err instanceof Error ? err.message : String(err);
logger.warn(`Failed to apply custom session names for ${provider}`, {
error: message,
});
}
}

View File

@@ -0,0 +1,87 @@
import { workspaceOriginalPathsDb } from '@/shared/database/repositories/workspace-original-paths.db.js';
import { getConnection } from '@/shared/database/connection.js';
import type {
SessionNameLookupRow,
SessionWithSummary,
} from '@/shared/database/types.js';
import { logger } from '@/shared/utils/logger.js';
// ---------------------------------------------------------------------------
// Queries
// ---------------------------------------------------------------------------
export const sessionsDb = {
  /**
   * Inserts a session if it does not already exist (INSERT OR IGNORE keeps
   * re-scans idempotent). The workspace path is registered first because
   * sessions.workspace_path is a foreign key into workspace_original_paths.
   */
  createSession(session_id: string, provider: string, workspacePath: string, customName?: string): void {
    const db = getConnection();
    workspaceOriginalPathsDb.createWorkspacePath(workspacePath);
    // better-sqlite3 throws a TypeError when binding `undefined`, so the
    // optional name must be coalesced to NULL explicitly.
    db.prepare(
      'INSERT OR IGNORE INTO sessions (session_id, provider, custom_name, workspace_path) VALUES (?, ?, ?, ?)'
    ).run(session_id, provider, customName ?? null, workspacePath);
  },
  /** Removes a session row by its primary key. */
  deleteSession(session_id: string): void {
    const db = getConnection();
    db.prepare('DELETE FROM sessions WHERE session_id = ?').run(session_id);
  },
};

View File

@@ -0,0 +1,12 @@
import { getConnection } from '@/shared/database/connection.js';
export const workspaceOriginalPathsDb = {
  // Idempotently records a workspace path; sessions.workspace_path is a
  // foreign key into this table, so it must exist before a session row does.
  createWorkspacePath(workspacePath: string): void {
    const db = getConnection();
    db.prepare(`
      INSERT INTO workspace_original_paths (workspace_path)
      VALUES (?)
      ON CONFLICT(workspace_path) DO NOTHING
    `).run(workspacePath);
  },
}

View File

@@ -39,18 +39,31 @@ CREATE TABLE IF NOT EXISTS user_credentials (
);
`;
export const SESSION_NAMES_TABLE_SCHEMA_SQL = `
CREATE TABLE IF NOT EXISTS session_names (
id INTEGER PRIMARY KEY AUTOINCREMENT,
session_id TEXT NOT NULL,
provider TEXT NOT NULL DEFAULT 'claude',
custom_name TEXT NOT NULL,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
UNIQUE(session_id, provider)
// Sessions table: one row per provider chat session, keyed by the provider's
// session id. workspace_path is a FK into workspace_original_paths so deletes
// and renames of a workspace cascade to its sessions.
export const SESSIONS_TABLE_SCHEMA_SQL = `
CREATE TABLE IF NOT EXISTS sessions (
session_id TEXT PRIMARY KEY NOT NULL,
provider TEXT NOT NULL,
custom_name TEXT,
workspace_path TEXT NOT NULL,
FOREIGN KEY (workspace_path) REFERENCES workspace_original_paths(workspace_path)
ON DELETE CASCADE
ON UPDATE CASCADE
);
`;
// Lookup table of workspace paths referenced by the sessions table.
export const WORK_SPACE_PATH_SQL = `
CREATE TABLE IF NOT EXISTS workspace_original_paths (
workspace_path TEXT PRIMARY KEY NOT NULL
);
`
// Singleton row (id is CHECK-constrained to 1) holding the last scan time.
export const LAST_SCANNED_AT_SQL = `
CREATE TABLE IF NOT EXISTS scan_state (
id INTEGER PRIMARY KEY CHECK (id = 1),
last_scanned_at TIMESTAMP NULL
);
`
export const APP_CONFIG_TABLE_SCHEMA_SQL = `
CREATE TABLE IF NOT EXISTS app_config (
key TEXT PRIMARY KEY,
@@ -70,20 +83,21 @@ CREATE INDEX IF NOT EXISTS idx_users_username ON users(username);
CREATE INDEX IF NOT EXISTS idx_users_active ON users(is_active);
${API_KEYS_TABLE_SCHEMA_SQL}
CREATE INDEX IF NOT EXISTS idx_api_keys_key ON api_keys(api_key);
CREATE INDEX IF NOT EXISTS idx_api_keys_user_id ON api_keys(user_id);
CREATE INDEX IF NOT EXISTS idx_api_keys_active ON api_keys(is_active);
${USER_CREDENTIALS_TABLE_SCHEMA_SQL}
CREATE INDEX IF NOT EXISTS idx_user_credentials_user_id ON user_credentials(user_id);
CREATE INDEX IF NOT EXISTS idx_user_credentials_type ON user_credentials(credential_type);
CREATE INDEX IF NOT EXISTS idx_user_credentials_active ON user_credentials(is_active);
${SESSION_NAMES_TABLE_SCHEMA_SQL}
${SESSIONS_TABLE_SCHEMA_SQL}
CREATE INDEX IF NOT EXISTS idx_session_ids_lookup ON sessions(session_id);
CREATE INDEX IF NOT EXISTS idx_session_names_lookup ON session_names(session_id, provider);
${WORK_SPACE_PATH_SQL}
${LAST_SCANNED_AT_SQL}
${APP_CONFIG_TABLE_SCHEMA_SQL}
`;

View File

@@ -6,6 +6,8 @@
* from SELECT queries; input types represent what goes into INSERT/UPDATE.
*/
import { LLMProvider } from "@/shared/types/app.js";
// ---------------------------------------------------------------------------
// Users
// ---------------------------------------------------------------------------
@@ -94,17 +96,15 @@ export type CreateCredentialResult = {
// Session Names
// ---------------------------------------------------------------------------
export type SessionNameRow = {
id: number;
export type SessionsRow = {
session_id: string;
provider: string;
provider: LLMProvider;
workspacePath: string;
custom_name: string;
created_at: string;
updated_at: string;
};
/** Minimal shape used in batch lookups. */
export type SessionNameLookupRow = Pick<SessionNameRow, 'session_id' | 'custom_name'>;
export type SessionNameLookupRow = Pick<SessionsRow, 'session_id' | 'custom_name'>;
/**
* Any object that has an `id` and `summary` field.
@@ -116,6 +116,16 @@ export type SessionWithSummary = {
[key: string]: unknown;
};
// ---------------------------------------------------------------------------
// Scan State
// ---------------------------------------------------------------------------
export type ScanStateRow = {
last_scanned_at: string;
}
// ---------------------------------------------------------------------------
// App Config
// ---------------------------------------------------------------------------

View File

@@ -1,7 +1,12 @@
import assert from 'node:assert/strict';
import test from 'node:test';
import { arePathsEquivalent, normalizePathForPlatform, toPortablePath } from './path.js';
import {
arePathsEquivalent,
normalizeComparablePath,
normalizePathForPlatform,
toPortablePath,
} from './path.js';
// This test verifies path strings can be normalized for logs and platform-specific execution.
test('path helpers normalize separators in both directions', () => {
@@ -27,3 +32,15 @@ test('arePathsEquivalent follows the case rules of the target platform', () => {
false,
);
});
// This test verifies path comparison keys stay stable across long-path prefixes and dot segments.
test('normalizeComparablePath resolves paths using the target platform rules', () => {
assert.equal(
normalizeComparablePath('\\\\?\\C:\\Repo\\..\\Repo\\File.txt', 'windows'),
'c:\\repo\\file.txt',
);
assert.equal(
normalizeComparablePath('/repo/../repo/File.txt', 'linux'),
'/repo/File.txt',
);
});

View File

@@ -17,18 +17,49 @@ export function normalizePathForPlatform(
return value.replace(/[\\/]+/g, separator);
}
/**
 * Normalizes a path into a stable comparison key for the target platform.
 *
 * Beyond separator normalization this trims incidental whitespace, strips the
 * Windows long-path prefix (`\\?\`), resolves `.` and `..` segments, and
 * lower-cases the result on Windows to honor case-insensitive semantics.
 *
 * Intended for equality checks, map keys, and de-duplication — not display,
 * since Windows casing is deliberately lowered.
 */
export function normalizeComparablePath(
  value: string,
  platform: RuntimePlatform = resolveRuntimePlatform(),
): string {
  if (typeof value !== 'string') {
    return '';
  }
  const trimmed = value.trim();
  if (!trimmed) {
    return '';
  }
  // Drop the Windows long-path prefix so equivalent spellings compare equal.
  const candidate = trimmed.startsWith('\\\\?\\') ? trimmed.slice(4) : trimmed;
  // Resolve using the *target* platform's path rules rather than the host OS.
  const pathModule = isWindowsPlatform(platform) ? path.win32 : path.posix;
  const resolved = pathModule.resolve(
    pathModule.normalize(normalizePathForPlatform(candidate, platform)),
  );
  return isWindowsPlatform(platform) ? resolved.toLowerCase() : resolved;
}
// This helper compares paths using the case-sensitivity rules of the target platform.
export function arePathsEquivalent(
left: string,
right: string,
platform: RuntimePlatform = resolveRuntimePlatform(),
): boolean {
// This branch uses the target platform's path semantics instead of the host machine's semantics.
const pathModule = isWindowsPlatform(platform) ? path.win32 : path.posix;
const normalizedLeft = pathModule.normalize(normalizePathForPlatform(left, platform));
const normalizedRight = pathModule.normalize(normalizePathForPlatform(right, platform));
return isWindowsPlatform(platform)
? normalizedLeft.toLowerCase() === normalizedRight.toLowerCase()
: normalizedLeft === normalizedRight;
return normalizeComparablePath(left, platform) === normalizeComparablePath(right, platform);
}

View File

@@ -18,3 +18,6 @@ export type ServerApplication = {
runtimePaths: RuntimePaths;
start: () => Promise<void>;
};
// ---------------------------------------------------------------------------
export type LLMProvider = 'claude' | 'codex' | 'cursor' | 'gemini';