mirror of
https://github.com/siteboon/claudecodeui.git
synced 2026-05-01 18:28:38 +00:00
refactor: modularize project services, and wizard create/clone flow
Restructure project creation, listing, GitHub clone progress, and TaskMaster details behind a dedicated TypeScript module under server/modules/projects/, and align the client wizard with a single path-based flow. Server / routing - Remove server/routes/projects.js and mount server/modules/projects/ projects.routes.ts at /api/projects (still behind authenticateToken). - Drop duplicate handlers from server/index.js for GET /api/projects and GET /api/projects/:projectId/taskmaster; those live on the new router. - Import WORKSPACES_ROOT and validateWorkspacePath from shared utils in index.js instead of the deleted projects route module. Projects router (projects.routes.ts) - GET /: list projects with sessions (existing snapshot behavior). - POST /create-project: validate body, reject legacy workspaceType and mixed clone fields, delegate to createProject service, return distinct success copy when an archived path is reactivated. - GET /clone-progress: Server-Sent Events for clone progress/complete/error; requires authenticated user id for token resolution; wires startCloneProject. - GET /:projectId/taskmaster: delegates to getProjectTaskMaster. Services (new) - project-management.service.ts: path validation, workspace directory creation, persistence via projectsDb.createProjectPath, mapping to API project shape; surfaces AppError for validation, conflict, and not-found cases; optional dependency injection for tests. - project-clone.service.ts: validates workspace, resolves GitHub auth (stored token or inline token), runs git clone with progress callbacks, registers project via createProject on success; sanitizes errors and supports cancellation; injectable dependencies for tests. - projects-has-taskmaster.service.ts: moves TaskMaster detection and normalization out of server/projects.js; resolve-by-id and public getProjectTaskMaster with structured AppError responses. 
Persistence and shared types - projectsDb.createProjectPath now returns CreateProjectPathResult (created | reactivated_archived | active_conflict) using INSERT … ON CONFLICT with selective update when the row is archived; normalizes display name from path or custom name; repository row typing moves to shared ProjectRepositoryRow. - getProjectPaths() returns only non-archived rows (isArchived = 0). - shared/types.ts: ProjectRepositoryRow, CreateProjectPathResult/outcome, WorkspacePathValidationResult. - shared/utils.ts: WORKSPACES_ROOT, forbidden path lists, validateWorkspacePath, asyncHandler for Express async routes. Legacy cleanup - server/projects.js: remove detectTaskMasterFolder, normalizeTaskMasterInfo, and getProjectTaskMasterById (logic lives in the new service). - server/routes/agent.js: register external API project paths with projectsDb.createProjectPath instead of addProjectManually try/catch; treat active_conflict as an existing registration and continue. Tests - Add Node test suites for project-management, project-clone, and projects-has-taskmaster services; update projects.service test import for renamed projects-with-sessions-fetch.service.ts. Rename - projects.service.ts → projects-with-sessions-fetch.service.ts; re-export from modules/projects/index.ts. Client (project creation wizard) - Remove StepTypeSelection and workspaceType from form state and types; wizard is two steps (configure path/GitHub auth, then review). - createWorkspaceRequest → createProjectRequest; clone vs create-only inferred from githubUrl (pathUtils / isCloneWorkflow). - Adjust step indices, WizardProgress, StepConfiguration/Review, WorkspacePathField, and src/utils/api.js as needed for the new API. Docs - Minor websocket README touch-up. Net: ~1.6k insertions / ~0.9k deletions across 29 files; behavior is centralized in typed services with explicit HTTP errors and test seams.
This commit is contained in:
@@ -11,9 +11,8 @@ import express from 'express';
|
||||
import cors from 'cors';
|
||||
import mime from 'mime-types';
|
||||
|
||||
import { AppError } from '@/shared/utils.js';
|
||||
import { AppError, WORKSPACES_ROOT, validateWorkspacePath } from '@/shared/utils.js';
|
||||
import { closeSessionsWatcher, initializeSessionsWatcher } from '@/modules/providers/index.js';
|
||||
import { getProjectsWithSessions } from '@/modules/projects/index.js';
|
||||
import { createWebSocketServer } from '@/modules/websocket/index.js';
|
||||
|
||||
import { getConnectableHost } from '../shared/networkHosts.js';
|
||||
@@ -24,7 +23,6 @@ import {
|
||||
renameProjectById,
|
||||
deleteSessionById,
|
||||
deleteProjectById,
|
||||
getProjectTaskMasterById,
|
||||
getProjectPathById,
|
||||
searchConversations,
|
||||
} from './projects.js';
|
||||
@@ -70,7 +68,7 @@ import mcpUtilsRoutes from './routes/mcp-utils.js';
|
||||
import commandsRoutes from './routes/commands.js';
|
||||
import settingsRoutes from './routes/settings.js';
|
||||
import agentRoutes from './routes/agent.js';
|
||||
import projectsRoutes, { WORKSPACES_ROOT, validateWorkspacePath } from './routes/projects.js';
|
||||
import projectModuleRoutes from './modules/projects/projects.routes.js';
|
||||
import userRoutes from './routes/user.js';
|
||||
import codexRoutes from './routes/codex.js';
|
||||
import geminiRoutes from './routes/gemini.js';
|
||||
@@ -167,7 +165,7 @@ app.use('/api', validateApiKey);
|
||||
app.use('/api/auth', authRoutes);
|
||||
|
||||
// Projects API Routes (protected)
|
||||
app.use('/api/projects', authenticateToken, projectsRoutes);
|
||||
app.use('/api/projects', authenticateToken, projectModuleRoutes);
|
||||
|
||||
// Git API Routes (protected)
|
||||
app.use('/api/git', authenticateToken, gitRoutes);
|
||||
@@ -305,29 +303,6 @@ app.post('/api/system/update', authenticateToken, async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
app.get('/api/projects', authenticateToken, async (req, res) => {
|
||||
try {
|
||||
const projects = await getProjectsWithSessions();
|
||||
res.json(projects);
|
||||
} catch (error) {
|
||||
res.status(500).json({ error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
// Project-scoped TaskMaster details; identified by DB-assigned `projectId`.
|
||||
app.get('/api/projects/:projectId/taskmaster', authenticateToken, async (req, res) => {
|
||||
try {
|
||||
const { projectId } = req.params;
|
||||
const taskMasterDetails = await getProjectTaskMasterById(projectId);
|
||||
if (!taskMasterDetails) {
|
||||
return res.status(404).json({ error: 'Project not found' });
|
||||
}
|
||||
res.json(taskMasterDetails);
|
||||
} catch (error) {
|
||||
res.status(500).json({ error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
// Sessions for a project; `projectId` is resolved to a path via the DB.
|
||||
app.get('/api/projects/:projectId/sessions', authenticateToken, async (req, res) => {
|
||||
try {
|
||||
@@ -1646,7 +1621,6 @@ async function startServer() {
|
||||
// Start watching the projects folder for changes
|
||||
await initializeSessionsWatcher();
|
||||
|
||||
// await getProjectsWithSessions(); // TODO: REMOVE THIS
|
||||
// Start server-side plugin processes for enabled plugins
|
||||
startEnabledPluginServers().catch(err => {
|
||||
console.error('[Plugins] Error during startup:', err.message);
|
||||
|
||||
@@ -1,48 +1,64 @@
|
||||
import { randomUUID } from 'node:crypto';
|
||||
import path from 'node:path';
|
||||
|
||||
import { getConnection } from '@/modules/database/connection.js';
|
||||
import type { CreateProjectPathResult, ProjectRepositoryRow } from '@/shared/types.js';
|
||||
|
||||
type ProjectRow = {
|
||||
project_id: string;
|
||||
project_path: string;
|
||||
custom_project_name: string | null;
|
||||
isStarred: number;
|
||||
isArchived: number;
|
||||
};
|
||||
function normalizeProjectDisplayName(projectPath: string, customProjectName: string | null): string {
|
||||
const trimmedCustomName = typeof customProjectName === 'string' ? customProjectName.trim() : '';
|
||||
if (trimmedCustomName.length > 0) {
|
||||
return trimmedCustomName;
|
||||
}
|
||||
|
||||
const directoryName = path.basename(projectPath);
|
||||
return directoryName || projectPath;
|
||||
}
|
||||
|
||||
export const projectsDb = {
|
||||
createProjectPath(projectPath: string, customProjectName: string | null = null): void {
|
||||
createProjectPath(projectPath: string, customProjectName: string | null = null): CreateProjectPathResult {
|
||||
const db = getConnection();
|
||||
db.prepare(`
|
||||
INSERT INTO projects (project_id, project_path, custom_project_name)
|
||||
VALUES (?, ?, ?)
|
||||
const normalizedProjectName = normalizeProjectDisplayName(projectPath, customProjectName);
|
||||
const row = db.prepare(`
|
||||
INSERT INTO projects (project_id, project_path, custom_project_name, isArchived)
|
||||
VALUES (?, ?, ?, 0)
|
||||
ON CONFLICT(project_path) DO UPDATE SET
|
||||
custom_project_name = CASE
|
||||
WHEN projects.custom_project_name IS NULL OR projects.custom_project_name = ''
|
||||
THEN excluded.custom_project_name
|
||||
ELSE projects.custom_project_name
|
||||
END
|
||||
`).run(randomUUID(), projectPath, customProjectName);
|
||||
isArchived = 0
|
||||
WHERE projects.isArchived = 1
|
||||
RETURNING project_id, project_path, custom_project_name, isStarred, isArchived
|
||||
`).get(randomUUID(), projectPath, normalizedProjectName) as ProjectRepositoryRow | undefined;
|
||||
|
||||
if (row) {
|
||||
return {
|
||||
outcome: row.isArchived === 1 ? 'reactivated_archived' : 'created',
|
||||
project: row,
|
||||
};
|
||||
}
|
||||
|
||||
const existingProject = projectsDb.getProjectPath(projectPath);
|
||||
return {
|
||||
outcome: 'active_conflict',
|
||||
project: existingProject,
|
||||
};
|
||||
},
|
||||
|
||||
getProjectPath(projectPath: string): ProjectRow | null {
|
||||
getProjectPath(projectPath: string): ProjectRepositoryRow | null {
|
||||
const db = getConnection();
|
||||
const row = db.prepare(`
|
||||
SELECT project_id, project_path, custom_project_name, isStarred, isArchived
|
||||
FROM projects
|
||||
WHERE project_path = ?
|
||||
`).get(projectPath) as ProjectRow | undefined;
|
||||
`).get(projectPath) as ProjectRepositoryRow | undefined;
|
||||
|
||||
return row ?? null;
|
||||
},
|
||||
|
||||
getProjectById(projectId: string): ProjectRow | null {
|
||||
getProjectById(projectId: string): ProjectRepositoryRow | null {
|
||||
const db = getConnection();
|
||||
const row = db.prepare(`
|
||||
SELECT project_id, project_path, custom_project_name, isStarred, isArchived
|
||||
FROM projects
|
||||
WHERE project_id = ?
|
||||
`).get(projectId) as ProjectRow | undefined;
|
||||
`).get(projectId) as ProjectRepositoryRow | undefined;
|
||||
|
||||
return row ?? null;
|
||||
},
|
||||
@@ -61,17 +77,18 @@ export const projectsDb = {
|
||||
SELECT project_path
|
||||
FROM projects
|
||||
WHERE project_id = ?
|
||||
`).get(projectId) as Pick<ProjectRow, 'project_path'> | undefined;
|
||||
`).get(projectId) as Pick<ProjectRepositoryRow, 'project_path'> | undefined;
|
||||
|
||||
return row?.project_path ?? null;
|
||||
},
|
||||
|
||||
getProjectPaths(): ProjectRow[] {
|
||||
getProjectPaths(): ProjectRepositoryRow[] {
|
||||
const db = getConnection();
|
||||
return db.prepare(`
|
||||
SELECT project_id, project_path, custom_project_name, isStarred, isArchived
|
||||
FROM projects
|
||||
`).all() as ProjectRow[];
|
||||
WHERE isArchived = 0
|
||||
`).all() as ProjectRepositoryRow[];
|
||||
},
|
||||
|
||||
getCustomProjectName(projectPath: string): string | null {
|
||||
@@ -80,7 +97,7 @@ export const projectsDb = {
|
||||
SELECT custom_project_name
|
||||
FROM projects
|
||||
WHERE project_path = ?
|
||||
`).get(projectPath) as Pick<ProjectRow, 'custom_project_name'> | undefined;
|
||||
`).get(projectPath) as Pick<ProjectRepositoryRow, 'custom_project_name'> | undefined;
|
||||
|
||||
return row?.custom_project_name ?? null;
|
||||
},
|
||||
|
||||
@@ -3,4 +3,4 @@ export {
|
||||
generateDisplayName,
|
||||
getProjectsWithSessions,
|
||||
writeSnapshot,
|
||||
} from './services/projects.service.js';
|
||||
} from './services/projects-with-sessions-fetch.service.js';
|
||||
|
||||
169
server/modules/projects/projects.routes.ts
Normal file
169
server/modules/projects/projects.routes.ts
Normal file
@@ -0,0 +1,169 @@
|
||||
import express from 'express';
|
||||
|
||||
import { createProject } from '@/modules/projects/services/project-management.service.js';
|
||||
import { startCloneProject } from '@/modules/projects/services/project-clone.service.js';
|
||||
import { getProjectTaskMaster } from '@/modules/projects/services/projects-has-taskmaster.service.js';
|
||||
import { AppError, asyncHandler } from '@/shared/utils.js';
|
||||
import { getProjectsWithSessions } from '@/modules/projects/services/projects-with-sessions-fetch.service.js';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
type AuthenticatedUser = {
|
||||
id?: number | string;
|
||||
};
|
||||
|
||||
function readQueryStringValue(value: unknown): string {
|
||||
if (typeof value === 'string') {
|
||||
return value;
|
||||
}
|
||||
|
||||
if (Array.isArray(value) && typeof value[0] === 'string') {
|
||||
return value[0];
|
||||
}
|
||||
|
||||
return '';
|
||||
}
|
||||
|
||||
function readOptionalNumericQueryValue(value: unknown): number | null {
|
||||
const rawValue = readQueryStringValue(value).trim();
|
||||
if (!rawValue) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const parsedValue = Number.parseInt(rawValue, 10);
|
||||
return Number.isNaN(parsedValue) ? null : parsedValue;
|
||||
}
|
||||
|
||||
function resolveRouteErrorMessage(error: unknown): string {
|
||||
if (error instanceof AppError) {
|
||||
return error.message;
|
||||
}
|
||||
|
||||
if (error instanceof Error && error.message) {
|
||||
return error.message;
|
||||
}
|
||||
|
||||
return 'Failed to clone repository';
|
||||
}
|
||||
|
||||
router.get(
|
||||
'/',
|
||||
asyncHandler(async (_req, res) => {
|
||||
const projects = await getProjectsWithSessions();
|
||||
res.json(projects);
|
||||
}),
|
||||
);
|
||||
|
||||
router.post(
|
||||
'/create-project',
|
||||
asyncHandler(async (req, res) => {
|
||||
const requestBody = req.body as Record<string, unknown>;
|
||||
const projectPath = typeof requestBody.path === 'string' ? requestBody.path : '';
|
||||
const customName = typeof requestBody.customName === 'string' ? requestBody.customName : null;
|
||||
|
||||
if (requestBody.workspaceType !== undefined) {
|
||||
throw new AppError('workspaceType is no longer supported. Use the single create-project flow.', {
|
||||
code: 'LEGACY_WORKSPACE_TYPE_UNSUPPORTED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
if (requestBody.githubUrl || requestBody.githubTokenId || requestBody.newGithubToken) {
|
||||
throw new AppError('Repository cloning is not supported on create-project', {
|
||||
code: 'CLONE_NOT_SUPPORTED_ON_CREATE_PROJECT',
|
||||
statusCode: 400,
|
||||
details: 'Use /api/projects/clone-progress for cloning workflows',
|
||||
});
|
||||
}
|
||||
|
||||
const projectCreationResult = await createProject({
|
||||
projectPath,
|
||||
customName,
|
||||
});
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
project: projectCreationResult.project,
|
||||
message:
|
||||
projectCreationResult.outcome === 'reactivated_archived'
|
||||
? 'Archived project path reused successfully'
|
||||
: 'Project created successfully',
|
||||
});
|
||||
}),
|
||||
);
|
||||
|
||||
router.get('/clone-progress', async (req, res) => {
|
||||
res.setHeader('Content-Type', 'text/event-stream');
|
||||
res.setHeader('Cache-Control', 'no-cache');
|
||||
res.setHeader('Connection', 'keep-alive');
|
||||
res.flushHeaders();
|
||||
|
||||
const sendEvent = (type: string, data: Record<string, unknown>) => {
|
||||
if (res.writableEnded) {
|
||||
return;
|
||||
}
|
||||
|
||||
res.write(`data: ${JSON.stringify({ type, ...data })}\n\n`);
|
||||
};
|
||||
|
||||
let cloneOperation: Awaited<ReturnType<typeof startCloneProject>> | null = null;
|
||||
const closeListener = () => {
|
||||
cloneOperation?.cancel();
|
||||
};
|
||||
req.on('close', closeListener);
|
||||
|
||||
try {
|
||||
const queryParams = req.query as Record<string, unknown>;
|
||||
const workspacePath = readQueryStringValue(queryParams.path);
|
||||
const githubUrl = readQueryStringValue(queryParams.githubUrl);
|
||||
const githubTokenId = readOptionalNumericQueryValue(queryParams.githubTokenId);
|
||||
const newGithubToken = readQueryStringValue(queryParams.newGithubToken) || null;
|
||||
|
||||
const authenticatedUser = (req as typeof req & { user?: AuthenticatedUser }).user;
|
||||
const userId = authenticatedUser?.id;
|
||||
if (userId === undefined || userId === null) {
|
||||
throw new AppError('Authenticated user is required', {
|
||||
code: 'AUTHENTICATION_REQUIRED',
|
||||
statusCode: 401,
|
||||
});
|
||||
}
|
||||
|
||||
cloneOperation = await startCloneProject(
|
||||
{
|
||||
workspacePath,
|
||||
githubUrl,
|
||||
githubTokenId,
|
||||
newGithubToken,
|
||||
userId,
|
||||
},
|
||||
{
|
||||
onProgress: (message) => {
|
||||
sendEvent('progress', { message });
|
||||
},
|
||||
onComplete: ({ project, message }) => {
|
||||
sendEvent('complete', { project, message });
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
await cloneOperation.waitForCompletion;
|
||||
} catch (error) {
|
||||
sendEvent('error', { message: resolveRouteErrorMessage(error) });
|
||||
} finally {
|
||||
req.off('close', closeListener);
|
||||
if (!res.writableEnded) {
|
||||
res.end();
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
router.get(
|
||||
'/:projectId/taskmaster',
|
||||
asyncHandler(async (req, res) => {
|
||||
const projectId = typeof req.params.projectId === 'string' ? req.params.projectId : '';
|
||||
const taskMasterDetails = await getProjectTaskMaster(projectId);
|
||||
res.json(taskMasterDetails);
|
||||
}),
|
||||
);
|
||||
|
||||
export default router;
|
||||
314
server/modules/projects/services/project-clone.service.ts
Normal file
314
server/modules/projects/services/project-clone.service.ts
Normal file
@@ -0,0 +1,314 @@
|
||||
import { spawn } from 'node:child_process';
|
||||
import { access, mkdir, rm } from 'node:fs/promises';
|
||||
import path from 'node:path';
|
||||
|
||||
import { githubTokensDb } from '@/modules/database/index.js';
|
||||
import { createProject } from '@/modules/projects/services/project-management.service.js';
|
||||
import type { WorkspacePathValidationResult } from '@/shared/types.js';
|
||||
import { AppError, validateWorkspacePath } from '@/shared/utils.js';
|
||||
|
||||
type CloneProjectInput = {
|
||||
workspacePath: string;
|
||||
githubUrl: string;
|
||||
githubTokenId?: number | null;
|
||||
newGithubToken?: string | null;
|
||||
userId: number | string;
|
||||
};
|
||||
|
||||
type CloneCompletePayload = {
|
||||
project: Record<string, unknown>;
|
||||
message: string;
|
||||
};
|
||||
|
||||
type CloneProjectEventHandlers = {
|
||||
onProgress: (message: string) => void;
|
||||
onComplete: (payload: CloneCompletePayload) => void;
|
||||
};
|
||||
|
||||
type GitCloneProcess = {
|
||||
stdout: NodeJS.ReadableStream | null;
|
||||
stderr: NodeJS.ReadableStream | null;
|
||||
on(event: 'close', listener: (code: number | null) => void): void;
|
||||
on(event: 'error', listener: (error: NodeJS.ErrnoException) => void): void;
|
||||
kill(): void;
|
||||
};
|
||||
|
||||
type CloneProjectDependencies = {
|
||||
validatePath: (requestedPath: string) => Promise<WorkspacePathValidationResult>;
|
||||
ensureDirectory: (directoryPath: string) => Promise<void>;
|
||||
pathExists: (targetPath: string) => Promise<boolean>;
|
||||
removePath: (targetPath: string) => Promise<void>;
|
||||
getGithubTokenById: (
|
||||
tokenId: number,
|
||||
userId: number,
|
||||
) => Promise<{ github_token: string } | null>;
|
||||
spawnGitClone: (cloneUrl: string, clonePath: string) => GitCloneProcess;
|
||||
registerProject: (projectPath: string, customName: string) => Promise<{ project: Record<string, unknown> }>;
|
||||
logError: (message: string, error: unknown) => void;
|
||||
};
|
||||
|
||||
export type CloneProjectOperation = {
|
||||
waitForCompletion: Promise<void>;
|
||||
cancel: () => void;
|
||||
};
|
||||
|
||||
async function defaultPathExists(targetPath: string): Promise<boolean> {
|
||||
try {
|
||||
await access(targetPath);
|
||||
return true;
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
|
||||
return false;
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
function sanitizeGitError(message: string, token: string | null): string {
|
||||
if (!message || !token) {
|
||||
return message;
|
||||
}
|
||||
|
||||
const escapedToken = token.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
||||
return message.replace(new RegExp(escapedToken, 'g'), '***');
|
||||
}
|
||||
|
||||
function resolveCloneFailureMessage(lastError: string, sanitizedError: string): string {
|
||||
if (lastError.includes('Authentication failed') || lastError.includes('could not read Username')) {
|
||||
return 'Authentication failed. Please check your credentials.';
|
||||
}
|
||||
|
||||
if (lastError.includes('Repository not found')) {
|
||||
return 'Repository not found. Please check the URL and ensure you have access.';
|
||||
}
|
||||
|
||||
if (lastError.includes('already exists')) {
|
||||
return 'Directory already exists';
|
||||
}
|
||||
|
||||
if (sanitizedError) {
|
||||
return sanitizedError;
|
||||
}
|
||||
|
||||
return 'Git clone failed';
|
||||
}
|
||||
|
||||
function resolveErrorMessage(error: unknown): string {
|
||||
if (error instanceof AppError) {
|
||||
return error.message;
|
||||
}
|
||||
|
||||
if (error instanceof Error && error.message) {
|
||||
return error.message;
|
||||
}
|
||||
|
||||
return 'Unexpected error';
|
||||
}
|
||||
|
||||
const defaultDependencies: CloneProjectDependencies = {
|
||||
validatePath: validateWorkspacePath,
|
||||
ensureDirectory: async (directoryPath: string): Promise<void> => {
|
||||
await mkdir(directoryPath, { recursive: true });
|
||||
},
|
||||
pathExists: defaultPathExists,
|
||||
removePath: async (targetPath: string): Promise<void> => {
|
||||
await rm(targetPath, { recursive: true, force: true });
|
||||
},
|
||||
getGithubTokenById: async (
|
||||
tokenId: number,
|
||||
userId: number,
|
||||
): Promise<{ github_token: string } | null> => {
|
||||
const tokenRow = githubTokensDb.getGithubTokenById(userId, tokenId) as
|
||||
| { github_token: string }
|
||||
| null;
|
||||
return tokenRow;
|
||||
},
|
||||
spawnGitClone: (cloneUrl: string, clonePath: string): GitCloneProcess =>
|
||||
spawn('git', ['clone', '--progress', cloneUrl, clonePath], {
|
||||
stdio: ['ignore', 'pipe', 'pipe'],
|
||||
env: {
|
||||
...process.env,
|
||||
GIT_TERMINAL_PROMPT: '0',
|
||||
},
|
||||
}) as unknown as GitCloneProcess,
|
||||
registerProject: async (
|
||||
projectPath: string,
|
||||
customName: string,
|
||||
): Promise<{ project: Record<string, unknown> }> =>
|
||||
createProject({
|
||||
projectPath,
|
||||
customName,
|
||||
}) as Promise<{ project: Record<string, unknown> }>,
|
||||
logError: (message: string, error: unknown): void => {
|
||||
console.error(message, error);
|
||||
},
|
||||
};
|
||||
|
||||
export async function startCloneProject(
|
||||
input: CloneProjectInput,
|
||||
handlers: CloneProjectEventHandlers,
|
||||
dependencies: CloneProjectDependencies = defaultDependencies,
|
||||
): Promise<CloneProjectOperation> {
|
||||
const normalizedWorkspacePath = input.workspacePath.trim();
|
||||
const normalizedGithubUrl = input.githubUrl.trim();
|
||||
|
||||
if (!normalizedWorkspacePath) {
|
||||
throw new AppError('workspacePath and githubUrl are required', {
|
||||
code: 'WORKSPACE_PATH_REQUIRED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
if (!normalizedGithubUrl) {
|
||||
throw new AppError('workspacePath and githubUrl are required', {
|
||||
code: 'GITHUB_URL_REQUIRED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const pathValidation = await dependencies.validatePath(normalizedWorkspacePath);
|
||||
if (!pathValidation.valid || !pathValidation.resolvedPath) {
|
||||
throw new AppError(pathValidation.error || 'Invalid workspace path', {
|
||||
code: 'INVALID_PROJECT_PATH',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const absolutePath = pathValidation.resolvedPath;
|
||||
await dependencies.ensureDirectory(absolutePath);
|
||||
|
||||
let githubToken: string | null = null;
|
||||
if (typeof input.githubTokenId === 'number') {
|
||||
const numericUserId =
|
||||
typeof input.userId === 'number' ? input.userId : Number.parseInt(String(input.userId), 10);
|
||||
if (Number.isNaN(numericUserId)) {
|
||||
throw new AppError('Authenticated user is required', {
|
||||
code: 'AUTHENTICATION_REQUIRED',
|
||||
statusCode: 401,
|
||||
});
|
||||
}
|
||||
|
||||
const token = await dependencies.getGithubTokenById(input.githubTokenId, numericUserId);
|
||||
if (!token) {
|
||||
throw new AppError('GitHub token not found', {
|
||||
code: 'GITHUB_TOKEN_NOT_FOUND',
|
||||
statusCode: 404,
|
||||
});
|
||||
}
|
||||
|
||||
githubToken = token.github_token;
|
||||
} else if (input.newGithubToken && input.newGithubToken.trim().length > 0) {
|
||||
githubToken = input.newGithubToken.trim();
|
||||
}
|
||||
|
||||
const sanitizedGithubUrl = normalizedGithubUrl.replace(/\/+$/, '').replace(/\.git$/, '');
|
||||
const repoName = sanitizedGithubUrl.split('/').pop() || 'repository';
|
||||
const clonePath = path.join(absolutePath, repoName);
|
||||
|
||||
if (await dependencies.pathExists(clonePath)) {
|
||||
throw new AppError(
|
||||
`Directory "${repoName}" already exists. Please choose a different location or remove the existing directory.`,
|
||||
{
|
||||
code: 'CLONE_TARGET_ALREADY_EXISTS',
|
||||
statusCode: 409,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
let cloneUrl = normalizedGithubUrl;
|
||||
if (githubToken) {
|
||||
try {
|
||||
const url = new URL(normalizedGithubUrl);
|
||||
url.username = githubToken;
|
||||
url.password = '';
|
||||
cloneUrl = url.toString();
|
||||
} catch {
|
||||
// SSH URLs cannot be represented by URL constructor and are used as-is.
|
||||
}
|
||||
}
|
||||
|
||||
handlers.onProgress(`Cloning into '${repoName}'...`);
|
||||
const gitProcess = dependencies.spawnGitClone(cloneUrl, clonePath);
|
||||
let lastError = '';
|
||||
|
||||
gitProcess.stdout?.on('data', (data: Buffer | string) => {
|
||||
const message = data.toString().trim();
|
||||
if (message) {
|
||||
handlers.onProgress(message);
|
||||
}
|
||||
});
|
||||
|
||||
gitProcess.stderr?.on('data', (data: Buffer | string) => {
|
||||
const message = data.toString().trim();
|
||||
lastError = message;
|
||||
if (message) {
|
||||
handlers.onProgress(message);
|
||||
}
|
||||
});
|
||||
|
||||
const waitForCompletion = new Promise<void>((resolve, reject) => {
|
||||
gitProcess.on('close', async (code) => {
|
||||
if (code === 0) {
|
||||
try {
|
||||
const createdProject = await dependencies.registerProject(clonePath, repoName);
|
||||
handlers.onComplete({
|
||||
project: createdProject.project,
|
||||
message: 'Repository cloned successfully',
|
||||
});
|
||||
resolve();
|
||||
} catch (error) {
|
||||
reject(
|
||||
new AppError(`Clone succeeded but failed to add project: ${resolveErrorMessage(error)}`, {
|
||||
code: 'CLONE_PROJECT_REGISTRATION_FAILED',
|
||||
statusCode: 500,
|
||||
}),
|
||||
);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
const sanitizedError = sanitizeGitError(lastError, githubToken);
|
||||
const errorMessage = resolveCloneFailureMessage(lastError, sanitizedError);
|
||||
|
||||
try {
|
||||
await dependencies.removePath(clonePath);
|
||||
} catch (cleanupError) {
|
||||
dependencies.logError('Failed to clean up after clone failure:', cleanupError);
|
||||
}
|
||||
|
||||
reject(
|
||||
new AppError(errorMessage, {
|
||||
code: 'GIT_CLONE_FAILED',
|
||||
statusCode: 500,
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
gitProcess.on('error', (error) => {
|
||||
if (error.code === 'ENOENT') {
|
||||
reject(
|
||||
new AppError('Git is not installed or not in PATH', {
|
||||
code: 'GIT_NOT_FOUND',
|
||||
statusCode: 500,
|
||||
}),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
reject(
|
||||
new AppError(error.message, {
|
||||
code: 'GIT_EXECUTION_FAILED',
|
||||
statusCode: 500,
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
return {
|
||||
waitForCompletion,
|
||||
cancel: () => {
|
||||
gitProcess.kill();
|
||||
},
|
||||
};
|
||||
}
|
||||
142
server/modules/projects/services/project-management.service.ts
Normal file
142
server/modules/projects/services/project-management.service.ts
Normal file
@@ -0,0 +1,142 @@
|
||||
import fs from 'node:fs/promises';
|
||||
import path from 'node:path';
|
||||
|
||||
import { projectsDb } from '@/modules/database/index.js';
|
||||
import type {
|
||||
CreateProjectPathResult,
|
||||
ProjectRepositoryRow,
|
||||
WorkspacePathValidationResult,
|
||||
} from '@/shared/types.js';
|
||||
import { AppError, validateWorkspacePath } from '@/shared/utils.js';
|
||||
|
||||
type CreateProjectInput = {
|
||||
projectPath: string;
|
||||
customName?: string | null;
|
||||
};
|
||||
|
||||
type CreateProjectDependencies = {
|
||||
validatePath: (projectPath: string) => Promise<WorkspacePathValidationResult>;
|
||||
ensureWorkspaceDirectory: (projectPath: string) => Promise<void>;
|
||||
persistProjectPath: (projectPath: string, customName: string | null) => CreateProjectPathResult;
|
||||
getProjectByPath: (projectPath: string) => ProjectRepositoryRow | null;
|
||||
};
|
||||
|
||||
type ProjectApiView = {
|
||||
projectId: string;
|
||||
path: string;
|
||||
fullPath: string;
|
||||
displayName: string;
|
||||
customName: string | null;
|
||||
isArchived: boolean;
|
||||
isStarred: boolean;
|
||||
sessions: [];
|
||||
cursorSessions: [];
|
||||
codexSessions: [];
|
||||
geminiSessions: [];
|
||||
sessionMeta: {
|
||||
hasMore: false;
|
||||
total: 0;
|
||||
};
|
||||
};
|
||||
|
||||
type CreateProjectServiceResult = {
|
||||
outcome: 'created' | 'reactivated_archived';
|
||||
project: ProjectApiView;
|
||||
};
|
||||
|
||||
const defaultDependencies: CreateProjectDependencies = {
|
||||
validatePath: validateWorkspacePath,
|
||||
ensureWorkspaceDirectory: async (projectPath: string): Promise<void> => {
|
||||
await fs.mkdir(projectPath, { recursive: true });
|
||||
const directoryStats = await fs.stat(projectPath);
|
||||
if (!directoryStats.isDirectory()) {
|
||||
throw new AppError('Path exists but is not a directory', {
|
||||
code: 'PROJECT_PATH_NOT_DIRECTORY',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
},
|
||||
persistProjectPath: (projectPath: string, customName: string | null): CreateProjectPathResult =>
|
||||
projectsDb.createProjectPath(projectPath, customName),
|
||||
getProjectByPath: (projectPath: string): ProjectRepositoryRow | null =>
|
||||
projectsDb.getProjectPath(projectPath),
|
||||
};
|
||||
|
||||
function resolveDisplayName(customName: string | null | undefined, projectPath: string): string {
|
||||
const trimmedCustomName = typeof customName === 'string' ? customName.trim() : '';
|
||||
if (trimmedCustomName.length > 0) {
|
||||
return trimmedCustomName;
|
||||
}
|
||||
|
||||
return path.basename(projectPath) || projectPath;
|
||||
}
|
||||
|
||||
function mapProjectRowToApiView(projectRow: ProjectRepositoryRow): ProjectApiView {
|
||||
return {
|
||||
projectId: projectRow.project_id,
|
||||
path: projectRow.project_path,
|
||||
fullPath: projectRow.project_path,
|
||||
displayName: resolveDisplayName(projectRow.custom_project_name, projectRow.project_path),
|
||||
customName: projectRow.custom_project_name,
|
||||
isArchived: Boolean(projectRow.isArchived),
|
||||
isStarred: Boolean(projectRow.isStarred),
|
||||
sessions: [],
|
||||
cursorSessions: [],
|
||||
codexSessions: [],
|
||||
geminiSessions: [],
|
||||
sessionMeta: {
|
||||
hasMore: false,
|
||||
total: 0,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Register a workspace path as a project.
 *
 * Flow: require a non-blank path → validate via dependencies.validatePath →
 * ensure the directory exists on disk → persist through
 * dependencies.persistProjectPath → map the stored row to the API shape.
 *
 * @param input - Raw request payload: projectPath plus optional customName.
 * @param dependencies - Injectable collaborators; defaults to production wiring.
 * @returns Outcome ('created' | 'reactivated_archived') and the API-shaped project.
 * @throws AppError 400 (PROJECT_PATH_REQUIRED, INVALID_PROJECT_PATH),
 *         409 (PROJECT_ALREADY_EXISTS), 500 (PROJECT_CREATE_FAILED).
 */
export async function createProject(
  input: CreateProjectInput,
  dependencies: CreateProjectDependencies = defaultDependencies,
): Promise<CreateProjectServiceResult> {
  const normalizedPath = (input.projectPath || '').trim();
  if (!normalizedPath) {
    throw new AppError('path is required', {
      code: 'PROJECT_PATH_REQUIRED',
      statusCode: 400,
    });
  }

  // Validation also resolves the path; both the flag and the resolved path
  // must be present before proceeding.
  const pathValidation = await dependencies.validatePath(normalizedPath);
  if (!pathValidation.valid || !pathValidation.resolvedPath) {
    throw new AppError('Invalid project path', {
      code: 'INVALID_PROJECT_PATH',
      statusCode: 400,
      details: pathValidation.error ?? 'Path validation failed',
    });
  }

  const resolvedProjectPath = pathValidation.resolvedPath;
  await dependencies.ensureWorkspaceDirectory(resolvedProjectPath);

  // A blank customName is replaced by the directory basename before
  // persisting, so an empty custom_project_name is never stored.
  const normalizedCustomName = resolveDisplayName(input.customName ?? null, resolvedProjectPath);
  const persistedProject = dependencies.persistProjectPath(resolvedProjectPath, normalizedCustomName);

  // An active row with the same path is a hard conflict (HTTP 409).
  if (persistedProject.outcome === 'active_conflict') {
    throw new AppError('Project path already exists and is active', {
      code: 'PROJECT_ALREADY_EXISTS',
      statusCode: 409,
      details: `Project path already exists: ${resolvedProjectPath}`,
    });
  }

  // Prefer the row returned by persistence; fall back to a by-path lookup.
  const projectRow = persistedProject.project ?? dependencies.getProjectByPath(resolvedProjectPath);
  if (!projectRow) {
    throw new AppError('Failed to resolve project after creation', {
      code: 'PROJECT_CREATE_FAILED',
      statusCode: 500,
    });
  }

  // Archived rows intentionally remain archived when reused, as requested.
  return {
    outcome: persistedProject.outcome,
    project: mapProjectRowToApiView(projectRow),
  };
}
|
||||
@@ -0,0 +1,248 @@
|
||||
import { access, readFile, stat } from 'node:fs/promises';
|
||||
import path from 'node:path';
|
||||
|
||||
import { projectsDb } from '@/modules/database/index.js';
|
||||
import { AppError } from '@/shared/utils.js';
|
||||
|
||||
// Minimal task shape read out of tasks.json; only status fields are needed
// for the aggregate counts computed in detectTaskMasterFolder.
type TaskMasterTask = {
  status?: string;
  subtasks?: Array<{
    status?: string;
  }>;
};

// Metadata variants: parsed counts, a parse-failure marker, or null when
// tasks/tasks.json is absent.
type TaskMasterMetadata =
  | {
      taskCount: number;
      subtaskCount: number;
      completed: number;
      pending: number;
      inProgress: number;
      review: number;
      completionPercentage: number;
      // ISO-8601 mtime of tasks/tasks.json.
      lastModified: string;
    }
  | {
      error: string;
    }
  | null;

// Raw result of probing `<project>/.taskmaster` on disk.
type TaskMasterDetectionResult = {
  hasTaskmaster: boolean;
  hasEssentialFiles?: boolean;
  // Presence map keyed by 'tasks/tasks.json' and 'config.json'.
  files?: Record<string, boolean>;
  metadata?: TaskMasterMetadata;
  path?: string;
  // Populated only on the hasTaskmaster === false branches.
  reason?: string;
};

// Detection result normalized for API consumers: optional fields resolved,
// collapsed into a single status string.
type NormalizedTaskMasterInfo = {
  hasTaskmaster: boolean;
  hasEssentialFiles: boolean;
  metadata: TaskMasterMetadata;
  status: 'configured' | 'not-configured';
};

// Payload returned by getProjectTaskMasterById / getProjectTaskMaster.
type GetProjectTaskMasterByIdResult = {
  projectId: string;
  projectPath: string;
  taskmaster: NormalizedTaskMasterInfo;
};

// Injectable collaborators, overridable in tests.
type GetProjectTaskMasterDependencies = {
  resolveProjectPathById: (projectId: string) => string | null;
  detectTaskMasterFolder: (projectPath: string) => Promise<TaskMasterDetectionResult>;
};

// Resolver signature used by getProjectTaskMaster (defaults to getProjectTaskMasterById).
type GetProjectTaskMasterResolver = (projectId: string) => Promise<GetProjectTaskMasterByIdResult | null>;
|
||||
|
||||
function extractTasksFromJson(tasksData: unknown): TaskMasterTask[] {
|
||||
if (!tasksData || typeof tasksData !== 'object') {
|
||||
return [];
|
||||
}
|
||||
|
||||
const legacyTasks = (tasksData as { tasks?: unknown }).tasks;
|
||||
if (Array.isArray(legacyTasks)) {
|
||||
return legacyTasks as TaskMasterTask[];
|
||||
}
|
||||
|
||||
const taggedTaskCollections: TaskMasterTask[] = [];
|
||||
for (const tagValue of Object.values(tasksData)) {
|
||||
if (!tagValue || typeof tagValue !== 'object') {
|
||||
continue;
|
||||
}
|
||||
|
||||
const tagTasks = (tagValue as { tasks?: unknown }).tasks;
|
||||
if (Array.isArray(tagTasks)) {
|
||||
taggedTaskCollections.push(...(tagTasks as TaskMasterTask[]));
|
||||
}
|
||||
}
|
||||
|
||||
return taggedTaskCollections;
|
||||
}
|
||||
|
||||
/**
 * Inspect `<projectPath>/.taskmaster` on disk and summarize its state.
 *
 * Returns `hasTaskmaster: false` with a human-readable `reason` when the
 * folder is missing, is not a directory, or an unexpected fs error occurs;
 * otherwise reports which key files exist plus aggregate task metadata parsed
 * from tasks/tasks.json. This function never throws — all failures are folded
 * into the result object.
 */
async function detectTaskMasterFolder(projectPath: string): Promise<TaskMasterDetectionResult> {
  try {
    const taskMasterPath = path.join(projectPath, '.taskmaster');

    // Existence + type check for the .taskmaster folder itself.
    try {
      const taskMasterStats = await stat(taskMasterPath);
      if (!taskMasterStats.isDirectory()) {
        return {
          hasTaskmaster: false,
          reason: '.taskmaster exists but is not a directory',
        };
      }
    } catch (error) {
      const fileError = error as NodeJS.ErrnoException;
      if (fileError.code === 'ENOENT') {
        return {
          hasTaskmaster: false,
          reason: '.taskmaster directory not found',
        };
      }

      // Permission errors etc. bubble to the outer catch below.
      throw fileError;
    }

    // Probe the key files; only tasks/tasks.json is considered essential.
    const keyFiles = ['tasks/tasks.json', 'config.json'];
    const fileStatus: Record<string, boolean> = {};
    let hasEssentialFiles = true;

    for (const fileName of keyFiles) {
      const absoluteFilePath = path.join(taskMasterPath, fileName);
      try {
        await access(absoluteFilePath);
        fileStatus[fileName] = true;
      } catch {
        fileStatus[fileName] = false;
        if (fileName === 'tasks/tasks.json') {
          hasEssentialFiles = false;
        }
      }
    }

    // Parse tasks.json (when present) into aggregate counts for the UI.
    let taskMetadata: TaskMasterMetadata = null;
    if (fileStatus['tasks/tasks.json']) {
      const tasksPath = path.join(taskMasterPath, 'tasks/tasks.json');
      try {
        const tasksContent = await readFile(tasksPath, 'utf8');
        const parsedTasksJson = JSON.parse(tasksContent) as unknown;
        const tasks = extractTasksFromJson(parsedTasksJson);

        // Tally tasks and subtasks by status; a missing status counts as 'pending'.
        const stats = tasks.reduce(
          (accumulator, currentTask) => {
            accumulator.total += 1;
            const normalizedTaskStatus = currentTask.status || 'pending';
            accumulator.byStatus[normalizedTaskStatus] = (accumulator.byStatus[normalizedTaskStatus] || 0) + 1;

            if (Array.isArray(currentTask.subtasks)) {
              for (const subtask of currentTask.subtasks) {
                accumulator.subtotalTasks += 1;
                const normalizedSubtaskStatus = subtask.status || 'pending';
                accumulator.subtaskByStatus[normalizedSubtaskStatus] =
                  (accumulator.subtaskByStatus[normalizedSubtaskStatus] || 0) + 1;
              }
            }

            return accumulator;
          },
          {
            total: 0,
            subtotalTasks: 0,
            byStatus: {} as Record<string, number>,
            subtaskByStatus: {} as Record<string, number>,
          },
        );

        const tasksStat = await stat(tasksPath);
        taskMetadata = {
          taskCount: stats.total,
          subtaskCount: stats.subtotalTasks,
          completed: stats.byStatus.done || 0,
          pending: stats.byStatus.pending || 0,
          inProgress: stats.byStatus['in-progress'] || 0,
          review: stats.byStatus.review || 0,
          // Percentage of top-level tasks with status 'done'; subtasks are excluded.
          completionPercentage: stats.total > 0 ? Math.round(((stats.byStatus.done || 0) / stats.total) * 100) : 0,
          lastModified: tasksStat.mtime.toISOString(),
        };
      } catch (parseError) {
        // Corrupt JSON is reported via metadata rather than failing detection.
        console.warn('Failed to parse tasks.json:', (parseError as Error).message);
        taskMetadata = {
          error: 'Failed to parse tasks.json',
        };
      }
    }

    return {
      hasTaskmaster: true,
      hasEssentialFiles,
      files: fileStatus,
      metadata: taskMetadata,
      path: taskMasterPath,
    };
  } catch (error) {
    // Catch-all: unexpected fs failures degrade to "not detected".
    console.error('Error detecting TaskMaster folder:', error);
    return {
      hasTaskmaster: false,
      reason: `Error checking directory: ${(error as Error).message}`,
    };
  }
}
|
||||
|
||||
function normalizeTaskMasterInfo(taskMasterResult: TaskMasterDetectionResult | null = null): NormalizedTaskMasterInfo {
|
||||
const hasTaskmaster = Boolean(taskMasterResult?.hasTaskmaster);
|
||||
const hasEssentialFiles = Boolean(taskMasterResult?.hasEssentialFiles);
|
||||
|
||||
return {
|
||||
hasTaskmaster,
|
||||
hasEssentialFiles,
|
||||
metadata: taskMasterResult?.metadata ?? null,
|
||||
status: hasTaskmaster && hasEssentialFiles ? 'configured' : 'not-configured',
|
||||
};
|
||||
}
|
||||
|
||||
// Production wiring for getProjectTaskMasterById: resolve the path through
// the projects DB and inspect the filesystem directly.
const defaultDependencies: GetProjectTaskMasterDependencies = {
  resolveProjectPathById: (projectId: string): string | null => projectsDb.getProjectPathById(projectId),
  detectTaskMasterFolder,
};
|
||||
|
||||
export async function getProjectTaskMasterById(
|
||||
projectId: string,
|
||||
dependencies: GetProjectTaskMasterDependencies = defaultDependencies,
|
||||
): Promise<GetProjectTaskMasterByIdResult | null> {
|
||||
const projectPath = dependencies.resolveProjectPathById(projectId);
|
||||
if (!projectPath) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const taskMasterResult = await dependencies.detectTaskMasterFolder(projectPath);
|
||||
return {
|
||||
projectId,
|
||||
projectPath,
|
||||
taskmaster: normalizeTaskMasterInfo(taskMasterResult),
|
||||
};
|
||||
}
|
||||
|
||||
export async function getProjectTaskMaster(
|
||||
projectId: string,
|
||||
resolveById: GetProjectTaskMasterResolver = getProjectTaskMasterById,
|
||||
): Promise<GetProjectTaskMasterByIdResult> {
|
||||
const normalizedProjectId = projectId.trim();
|
||||
if (!normalizedProjectId) {
|
||||
throw new AppError('projectId is required', {
|
||||
code: 'PROJECT_ID_REQUIRED',
|
||||
statusCode: 400,
|
||||
});
|
||||
}
|
||||
|
||||
const taskMasterDetails = await resolveById(normalizedProjectId);
|
||||
if (!taskMasterDetails) {
|
||||
throw new AppError('Project not found', {
|
||||
code: 'PROJECT_NOT_FOUND',
|
||||
statusCode: 404,
|
||||
});
|
||||
}
|
||||
|
||||
return taskMasterDetails;
|
||||
}
|
||||
160
server/modules/projects/tests/project-clone.service.test.ts
Normal file
160
server/modules/projects/tests/project-clone.service.test.ts
Normal file
@@ -0,0 +1,160 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import { EventEmitter } from 'node:events';
|
||||
import path from 'node:path';
|
||||
import { PassThrough } from 'node:stream';
|
||||
import test from 'node:test';
|
||||
|
||||
import { startCloneProject } from '@/modules/projects/services/project-clone.service.js';
|
||||
import { AppError } from '@/shared/utils.js';
|
||||
|
||||
// The injectable dependency bag is the third parameter of startCloneProject;
// derive its type from the signature so these tests track refactors.
type TestDependencies = Parameters<typeof startCloneProject>[2];

/**
 * Build a fully stubbed dependency bag for startCloneProject.
 *
 * Defaults model the happy path: valid workspace, nothing pre-existing on
 * disk, a stored GitHub token, and successful registration. `spawnGitClone`
 * deliberately throws, so any test that reaches the spawn step must supply
 * its own mock process via `overrides`.
 */
function buildDependencies(overrides: Partial<NonNullable<TestDependencies>> = {}): NonNullable<TestDependencies> {
  return {
    validatePath: async () => ({ valid: true, resolvedPath: '/workspace/root' }),
    ensureDirectory: async () => undefined,
    pathExists: async () => false,
    removePath: async () => undefined,
    getGithubTokenById: async () => ({ github_token: 'token-value' }),
    spawnGitClone: () => {
      throw new Error('spawnGitClone should be overridden in this test');
    },
    registerProject: async () => ({ project: { projectId: 'project-1' } }),
    logError: () => undefined,
    ...overrides,
  };
}
|
||||
|
||||
function createMockGitProcess() {
|
||||
const emitter = new EventEmitter() as EventEmitter & {
|
||||
stdout: PassThrough;
|
||||
stderr: PassThrough;
|
||||
kill: () => void;
|
||||
};
|
||||
|
||||
emitter.stdout = new PassThrough();
|
||||
emitter.stderr = new PassThrough();
|
||||
emitter.kill = () => {
|
||||
emitter.emit('close', null);
|
||||
};
|
||||
|
||||
return emitter;
|
||||
}
|
||||
|
||||
// An empty workspacePath must reject with a typed AppError before any
// dependency is invoked.
test('startCloneProject rejects when workspace path is missing', async () => {
  await assert.rejects(
    async () =>
      startCloneProject(
        {
          workspacePath: '',
          githubUrl: 'https://github.com/example/repo',
          userId: 1,
        },
        {
          onProgress: () => undefined,
          onComplete: () => undefined,
        },
        buildDependencies(),
      ),
    (error: unknown) => {
      assert.ok(error instanceof AppError);
      assert.equal(error.code, 'WORKSPACE_PATH_REQUIRED');
      return true;
    },
  );
});

// An empty githubUrl is rejected the same way.
test('startCloneProject rejects when github URL is missing', async () => {
  await assert.rejects(
    async () =>
      startCloneProject(
        {
          workspacePath: '/workspace/root',
          githubUrl: '',
          userId: 1,
        },
        {
          onProgress: () => undefined,
          onComplete: () => undefined,
        },
        buildDependencies(),
      ),
    (error: unknown) => {
      assert.ok(error instanceof AppError);
      assert.equal(error.code, 'GITHUB_URL_REQUIRED');
      return true;
    },
  );
});

// Selecting a stored token id that does not resolve must fail with
// GITHUB_TOKEN_NOT_FOUND rather than attempting an unauthenticated clone.
test('startCloneProject rejects when selected github token does not exist', async () => {
  await assert.rejects(
    async () =>
      startCloneProject(
        {
          workspacePath: '/workspace/root',
          githubUrl: 'https://github.com/example/repo',
          githubTokenId: 12,
          userId: 1,
        },
        {
          onProgress: () => undefined,
          onComplete: () => undefined,
        },
        buildDependencies({
          getGithubTokenById: async () => null,
        }),
      ),
    (error: unknown) => {
      assert.ok(error instanceof AppError);
      assert.equal(error.code, 'GITHUB_TOKEN_NOT_FOUND');
      return true;
    },
  );
});

// Happy path: a git process that exits 0 should surface progress messages,
// register the project under the repo-derived directory/name, and invoke
// onComplete with the success payload.
test('startCloneProject completes and emits complete payload when git exits successfully', async () => {
  const gitProcess = createMockGitProcess();
  const progressMessages: string[] = [];
  let completePayload: { project: Record<string, unknown>; message: string } | null = null;
  let capturedProjectPath = '';
  let capturedCustomName = '';

  const operation = await startCloneProject(
    {
      workspacePath: '/workspace/root',
      githubUrl: 'https://github.com/example/repo.git',
      userId: 1,
    },
    {
      onProgress: (message) => {
        progressMessages.push(message);
      },
      onComplete: (payload: { project: Record<string, unknown>; message: string }) => {
        completePayload = payload;
      },
    },
    buildDependencies({
      spawnGitClone: () => gitProcess as any,
      registerProject: async (projectPath, customName) => {
        capturedProjectPath = projectPath;
        capturedCustomName = customName;
        return { project: { projectId: 'project-1', path: projectPath } };
      },
    }),
  );

  // Simulate a successful git exit, then wait for the service to settle.
  gitProcess.emit('close', 0);
  await operation.waitForCompletion;

  assert.ok(progressMessages.some((message) => message.includes("Cloning into 'repo'")));
  assert.equal(capturedCustomName, 'repo');
  assert.equal(path.basename(capturedProjectPath), 'repo');
  assert.notEqual(completePayload, null);
  // Re-widen via `unknown` because TS cannot narrow a variable assigned
  // inside the onComplete callback.
  const resolvedCompletePayload = completePayload as unknown as {
    project: Record<string, unknown>;
    message: string;
  };
  assert.equal(resolvedCompletePayload.message, 'Repository cloned successfully');
  assert.equal((resolvedCompletePayload.project.projectId as string) || '', 'project-1');
});
|
||||
117
server/modules/projects/tests/project-management.service.test.ts
Normal file
117
server/modules/projects/tests/project-management.service.test.ts
Normal file
@@ -0,0 +1,117 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import test from 'node:test';
|
||||
|
||||
import { createProject } from '@/modules/projects/services/project-management.service.js';
|
||||
import { AppError } from '@/shared/utils.js';
|
||||
|
||||
// Canonical DB row reused across these tests; SQLite stores booleans as 0/1.
const projectRow = {
  project_id: 'project-1',
  project_path: '/workspace/my-project',
  custom_project_name: 'my-project',
  isStarred: 0,
  isArchived: 0,
};

// A missing path must fail fast with a 400 before any dependency is touched.
test('createProject throws when project path is missing', async () => {
  await assert.rejects(
    async () => createProject({ projectPath: '' }),
    (error: unknown) => {
      assert.ok(error instanceof AppError);
      assert.equal(error.code, 'PROJECT_PATH_REQUIRED');
      assert.equal(error.statusCode, 400);
      return true;
    },
  );
});

// A failed validatePath surfaces INVALID_PROJECT_PATH with the validator's
// message preserved in `details`.
test('createProject throws when path validation fails', async () => {
  await assert.rejects(
    async () =>
      createProject(
        { projectPath: '/invalid/path' },
        {
          validatePath: async () => ({ valid: false, error: 'blocked path' }),
          ensureWorkspaceDirectory: async () => undefined,
          persistProjectPath: () => ({ outcome: 'created', project: projectRow }),
          getProjectByPath: () => projectRow,
        },
      ),
    (error: unknown) => {
      assert.ok(error instanceof AppError);
      assert.equal(error.code, 'INVALID_PROJECT_PATH');
      assert.equal(error.statusCode, 400);
      assert.equal(error.details, 'blocked path');
      return true;
    },
  );
});

// An 'active_conflict' persistence outcome maps to HTTP 409.
test('createProject throws conflict when active project path already exists', async () => {
  await assert.rejects(
    async () =>
      createProject(
        { projectPath: '/workspace/my-project' },
        {
          validatePath: async () => ({ valid: true, resolvedPath: '/workspace/my-project' }),
          ensureWorkspaceDirectory: async () => undefined,
          persistProjectPath: () => ({ outcome: 'active_conflict', project: projectRow }),
          getProjectByPath: () => projectRow,
        },
      ),
    (error: unknown) => {
      assert.ok(error instanceof AppError);
      assert.equal(error.code, 'PROJECT_ALREADY_EXISTS');
      assert.equal(error.statusCode, 409);
      assert.equal(error.details, 'Project path already exists: /workspace/my-project');
      return true;
    },
  );
});

// An empty customName falls back to the directory basename, and that
// fallback is exactly what gets persisted.
test('createProject falls back to directory name when custom name is not provided', async () => {
  let capturedCustomName: string | null = null;

  const result = await createProject(
    { projectPath: '/workspace/my-project', customName: '' },
    {
      validatePath: async () => ({ valid: true, resolvedPath: '/workspace/my-project' }),
      ensureWorkspaceDirectory: async () => undefined,
      persistProjectPath: (_projectPath, customName) => {
        capturedCustomName = customName;
        return {
          outcome: 'created',
          project: {
            ...projectRow,
            custom_project_name: customName,
          },
        };
      },
      getProjectByPath: () => projectRow,
    },
  );

  assert.equal(capturedCustomName, 'my-project');
  assert.equal(result.outcome, 'created');
  assert.equal(result.project.displayName, 'my-project');
});

// Archived rows are reused as-is: the outcome is surfaced to the caller and
// isArchived stays true on the returned project.
test('createProject returns archived reuse outcome when archived row is reused', async () => {
  const result = await createProject(
    { projectPath: '/workspace/my-project' },
    {
      validatePath: async () => ({ valid: true, resolvedPath: '/workspace/my-project' }),
      ensureWorkspaceDirectory: async () => undefined,
      persistProjectPath: () => ({
        outcome: 'reactivated_archived',
        project: {
          ...projectRow,
          isArchived: 1,
        },
      }),
      getProjectByPath: () => projectRow,
    },
  );

  assert.equal(result.outcome, 'reactivated_archived');
  assert.equal(result.project.isArchived, true);
});
|
||||
@@ -0,0 +1,105 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import test from 'node:test';
|
||||
|
||||
import {
|
||||
getProjectTaskMaster,
|
||||
getProjectTaskMasterById,
|
||||
} from '@/modules/projects/services/projects-has-taskmaster.service.js';
|
||||
import { AppError } from '@/shared/utils.js';
|
||||
|
||||
// When the id cannot be resolved to a path, the service returns null and
// must not touch the filesystem detector at all.
test('getProjectTaskMasterById returns null when project path is missing', async () => {
  const result = await getProjectTaskMasterById('project-1', {
    resolveProjectPathById: () => null,
    detectTaskMasterFolder: async () => {
      throw new Error('detectTaskMasterFolder should not be called when path is missing');
    },
  });

  assert.equal(result, null);
});

// Folder present + essential files present → status 'configured' and the
// detector's metadata passed through untouched.
test('getProjectTaskMasterById returns configured status when taskmaster exists with essential files', async () => {
  const result = await getProjectTaskMasterById('project-1', {
    resolveProjectPathById: () => '/workspace/project-1',
    detectTaskMasterFolder: async () => ({
      hasTaskmaster: true,
      hasEssentialFiles: true,
      metadata: {
        taskCount: 3,
        subtaskCount: 0,
        completed: 1,
        pending: 2,
        inProgress: 0,
        review: 0,
        completionPercentage: 33,
        lastModified: '2026-01-01T00:00:00.000Z',
      },
    }),
  });

  assert.ok(result);
  assert.equal(result.projectId, 'project-1');
  assert.equal(result.projectPath, '/workspace/project-1');
  assert.equal(result.taskmaster.hasTaskmaster, true);
  assert.equal(result.taskmaster.hasEssentialFiles, true);
  assert.equal(result.taskmaster.status, 'configured');
  assert.deepEqual(result.taskmaster.metadata, {
    taskCount: 3,
    subtaskCount: 0,
    completed: 1,
    pending: 2,
    inProgress: 0,
    review: 0,
    completionPercentage: 33,
    lastModified: '2026-01-01T00:00:00.000Z',
  });
});

// No .taskmaster folder → normalized to 'not-configured' with null metadata
// and defaulted booleans.
test('getProjectTaskMasterById returns not-configured status when taskmaster is missing', async () => {
  const result = await getProjectTaskMasterById('project-1', {
    resolveProjectPathById: () => '/workspace/project-1',
    detectTaskMasterFolder: async () => ({
      hasTaskmaster: false,
    }),
  });

  assert.ok(result);
  assert.equal(result.taskmaster.hasTaskmaster, false);
  assert.equal(result.taskmaster.hasEssentialFiles, false);
  assert.equal(result.taskmaster.status, 'not-configured');
  assert.equal(result.taskmaster.metadata, null);
});

// A blank id is rejected with 400 before the resolver runs.
test('getProjectTaskMaster throws when project id is missing', async () => {
  await assert.rejects(
    async () =>
      getProjectTaskMaster('', async () => ({
        projectId: 'project-1',
        projectPath: '/workspace/project-1',
        taskmaster: {
          hasTaskmaster: true,
          hasEssentialFiles: true,
          metadata: null,
          status: 'configured',
        },
      })),
    (error: unknown) => {
      assert.ok(error instanceof AppError);
      assert.equal(error.code, 'PROJECT_ID_REQUIRED');
      assert.equal(error.statusCode, 400);
      return true;
    },
  );
});

// A null resolver result is converted into a 404 AppError.
test('getProjectTaskMaster throws when project does not exist', async () => {
  await assert.rejects(
    async () => getProjectTaskMaster('project-that-does-not-exist', async () => null),
    (error: unknown) => {
      assert.ok(error instanceof AppError);
      assert.equal(error.code, 'PROJECT_NOT_FOUND');
      assert.equal(error.statusCode, 404);
      return true;
    },
  );
});
|
||||
@@ -4,7 +4,7 @@ import test from 'node:test';
|
||||
import {
|
||||
createProjectsSnapshot,
|
||||
} from '@/modules/projects/index.js';
|
||||
import { ProjectListItem, ProjectsSnapshot } from '@/modules/projects/services/projects.service.js';
|
||||
import { ProjectListItem, ProjectsSnapshot } from '@/modules/projects/services/projects-with-sessions-fetch.service.js';
|
||||
|
||||
test('createProjectsSnapshot returns an object matching the predefined snapshot type', () => {
|
||||
const projects: ProjectListItem[] = [
|
||||
|
||||
@@ -222,7 +222,7 @@ Only chat sockets (`/ws`) are tracked in `connectedClients`.
|
||||
|
||||
That shared set is consumed by:
|
||||
|
||||
1. `modules/projects/services/projects.service.ts`
|
||||
1. `modules/projects/services/projects-with-sessions-fetch.service.ts`
|
||||
Broadcasts `loading_progress` while project snapshots are being built.
|
||||
2. `modules/providers/services/sessions-watcher.service.ts`
|
||||
Broadcasts `projects_updated` when provider session artifacts change.
|
||||
|
||||
@@ -35,146 +35,6 @@ import { generateDisplayName } from '@/modules/projects';
|
||||
import sessionManager from './sessionManager.js';
|
||||
import { projectsDb, sessionsDb } from './modules/database/index.js';
|
||||
|
||||
// Import TaskMaster detection functions
|
||||
/**
 * Inspect `<projectPath>/.taskmaster` and summarize its state (legacy JS
 * implementation; superseded by the TypeScript service).
 *
 * Never throws: missing folder, wrong type, or unexpected fs errors all
 * return `{ hasTaskmaster: false, reason }`; parse failures of tasks.json
 * are reported via `metadata.error`.
 */
async function detectTaskMasterFolder(projectPath) {
  try {
    const taskMasterPath = path.join(projectPath, '.taskmaster');

    // Check if .taskmaster directory exists
    try {
      const stats = await fs.stat(taskMasterPath);
      if (!stats.isDirectory()) {
        return {
          hasTaskmaster: false,
          reason: '.taskmaster exists but is not a directory'
        };
      }
    } catch (error) {
      if (error.code === 'ENOENT') {
        return {
          hasTaskmaster: false,
          reason: '.taskmaster directory not found'
        };
      }
      // Non-ENOENT errors (e.g. permissions) fall through to the outer catch.
      throw error;
    }

    // Check for key TaskMaster files; only tasks/tasks.json is essential.
    const keyFiles = [
      'tasks/tasks.json',
      'config.json'
    ];

    const fileStatus = {};
    let hasEssentialFiles = true;

    for (const file of keyFiles) {
      const filePath = path.join(taskMasterPath, file);
      try {
        await fs.access(filePath);
        fileStatus[file] = true;
      } catch (error) {
        fileStatus[file] = false;
        if (file === 'tasks/tasks.json') {
          hasEssentialFiles = false;
        }
      }
    }

    // Parse tasks.json if it exists for metadata
    let taskMetadata = null;
    if (fileStatus['tasks/tasks.json']) {
      try {
        const tasksPath = path.join(taskMasterPath, 'tasks/tasks.json');
        const tasksContent = await fs.readFile(tasksPath, 'utf8');
        const tasksData = JSON.parse(tasksContent);

        // Handle both tagged and legacy formats
        let tasks = [];
        if (tasksData.tasks) {
          // Legacy format
          tasks = tasksData.tasks;
        } else {
          // Tagged format - get tasks from all tags
          Object.values(tasksData).forEach(tagData => {
            if (tagData.tasks) {
              tasks = tasks.concat(tagData.tasks);
            }
          });
        }

        // Calculate task statistics.
        // NOTE(review): a task with an undefined status increments the key
        // 'undefined' here (no 'pending' fallback, unlike the TS rewrite).
        const stats = tasks.reduce((acc, task) => {
          acc.total++;
          acc[task.status] = (acc[task.status] || 0) + 1;

          // Count subtasks
          if (task.subtasks) {
            task.subtasks.forEach(subtask => {
              acc.subtotalTasks++;
              acc.subtasks = acc.subtasks || {};
              acc.subtasks[subtask.status] = (acc.subtasks[subtask.status] || 0) + 1;
            });
          }

          return acc;
        }, {
          total: 0,
          subtotalTasks: 0,
          pending: 0,
          'in-progress': 0,
          done: 0,
          review: 0,
          deferred: 0,
          cancelled: 0,
          subtasks: {}
        });

        taskMetadata = {
          taskCount: stats.total,
          subtaskCount: stats.subtotalTasks,
          completed: stats.done || 0,
          pending: stats.pending || 0,
          inProgress: stats['in-progress'] || 0,
          review: stats.review || 0,
          // Percentage of top-level tasks marked done; subtasks excluded.
          completionPercentage: stats.total > 0 ? Math.round((stats.done / stats.total) * 100) : 0,
          lastModified: (await fs.stat(tasksPath)).mtime.toISOString()
        };
      } catch (parseError) {
        console.warn('Failed to parse tasks.json:', parseError.message);
        taskMetadata = { error: 'Failed to parse tasks.json' };
      }
    }

    return {
      hasTaskmaster: true,
      hasEssentialFiles,
      files: fileStatus,
      metadata: taskMetadata,
      path: taskMasterPath
    };

  } catch (error) {
    // Catch-all: unexpected fs failures degrade to "not detected".
    console.error('Error detecting TaskMaster folder:', error);
    return {
      hasTaskmaster: false,
      reason: `Error checking directory: ${error.message}`
    };
  }
}
|
||||
|
||||
/**
 * Normalize a raw detection result (or null) for API consumers: coerce the
 * optional flags to strict booleans, default metadata to null, and collapse
 * the flags into a single 'configured' / 'not-configured' status.
 */
function normalizeTaskMasterInfo(taskMasterResult = null) {
  const detected = Boolean(taskMasterResult?.hasTaskmaster);
  const essentialsPresent = Boolean(taskMasterResult?.hasEssentialFiles);
  const status = detected && essentialsPresent ? 'configured' : 'not-configured';

  return {
    hasTaskmaster: detected,
    hasEssentialFiles: essentialsPresent,
    metadata: taskMasterResult?.metadata ?? null,
    status
  };
}
|
||||
|
||||
/**
|
||||
* Resolve the absolute project path for a database `projectId`.
|
||||
*
|
||||
@@ -209,27 +69,6 @@ function claudeFolderNameFromPath(projectPath) {
|
||||
return projectPath.replace(/[^a-zA-Z0-9-]/g, '-');
|
||||
}
|
||||
|
||||
/**
|
||||
* TaskMaster details for a project, addressed by DB `projectId`.
|
||||
*
|
||||
* Resolves the project path through the DB and inspects the `.taskmaster`
|
||||
* folder on disk for metadata the TaskMaster panel displays.
|
||||
*/
|
||||
// Resolve TaskMaster details for a DB project id (legacy JS version).
// Returns null when the id does not map to a project path; otherwise probes
// the .taskmaster folder and normalizes the result for the API.
async function getProjectTaskMasterById(projectId) {
  const projectPath = await getProjectPathById(projectId);
  if (!projectPath) {
    return null;
  }

  const taskMasterResult = await detectTaskMasterFolder(projectPath);

  return {
    projectId,
    projectPath,
    taskmaster: normalizeTaskMasterInfo(taskMasterResult)
  };
}
|
||||
|
||||
// Cache for extracted project directories
|
||||
const projectDirectoryCache = new Map();
|
||||
|
||||
@@ -2220,7 +2059,6 @@ export {
|
||||
deleteSessionById,
|
||||
deleteProjectById,
|
||||
addProjectManually,
|
||||
getProjectTaskMasterById,
|
||||
getProjectPathById,
|
||||
claudeFolderNameFromPath,
|
||||
clearProjectDirectoryCache,
|
||||
|
||||
@@ -4,8 +4,7 @@ import path from 'path';
|
||||
import os from 'os';
|
||||
import { promises as fs } from 'fs';
|
||||
import crypto from 'crypto';
|
||||
import { userDb, apiKeysDb, githubTokensDb } from '../modules/database/index.js';
|
||||
import { addProjectManually } from '../projects.js';
|
||||
import { userDb, apiKeysDb, githubTokensDb, projectsDb } from '../modules/database/index.js';
|
||||
import { queryClaudeSDK } from '../claude-sdk.js';
|
||||
import { spawnCursor } from '../cursor-cli.js';
|
||||
import { queryCodex } from '../openai-codex.js';
|
||||
@@ -900,19 +899,12 @@ router.post('/', validateExternalApiKey, async (req, res) => {
|
||||
}
|
||||
}
|
||||
|
||||
// Register the project (or use existing registration)
|
||||
let project;
|
||||
try {
|
||||
project = await addProjectManually(finalProjectPath);
|
||||
console.log('📦 Project registered:', project);
|
||||
} catch (error) {
|
||||
// If project already exists, that's fine - continue with the existing registration
|
||||
if (error.message && error.message.includes('Project already configured')) {
|
||||
console.log('📦 Using existing project registration for:', finalProjectPath);
|
||||
project = { path: finalProjectPath };
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
// Register project path in DB (or reuse existing active registration)
|
||||
const registrationResult = projectsDb.createProjectPath(finalProjectPath, null);
|
||||
if (registrationResult.outcome === 'active_conflict') {
|
||||
console.log('Project registration already exists for:', finalProjectPath);
|
||||
} else {
|
||||
console.log('Project registered:', registrationResult.project);
|
||||
}
|
||||
|
||||
// Set up writer based on streaming mode
|
||||
|
||||
@@ -1,413 +0,0 @@
|
||||
import express from 'express';
|
||||
import { promises as fs } from 'fs';
|
||||
import path from 'path';
|
||||
import { spawn } from 'child_process';
|
||||
import os from 'os';
|
||||
import { addProjectManually } from '../projects.js';
|
||||
import { githubTokensDb } from '../modules/database/index.js';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
function sanitizeGitError(message, token) {
|
||||
if (!message || !token) return message;
|
||||
return message.replace(new RegExp(token.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'), 'g'), '***');
|
||||
}
|
||||
|
||||
// Configure allowed workspace root (defaults to user's home directory)
|
||||
export const WORKSPACES_ROOT = process.env.WORKSPACES_ROOT || os.homedir();
|
||||
|
||||
// System-critical paths that should never be used as workspace directories
|
||||
export const FORBIDDEN_PATHS = [
|
||||
// Unix
|
||||
'/',
|
||||
'/etc',
|
||||
'/bin',
|
||||
'/sbin',
|
||||
'/usr',
|
||||
'/dev',
|
||||
'/proc',
|
||||
'/sys',
|
||||
'/var',
|
||||
'/boot',
|
||||
'/root',
|
||||
'/lib',
|
||||
'/lib64',
|
||||
'/opt',
|
||||
'/tmp',
|
||||
'/run',
|
||||
// Windows
|
||||
'C:\\Windows',
|
||||
'C:\\Program Files',
|
||||
'C:\\Program Files (x86)',
|
||||
'C:\\ProgramData',
|
||||
'C:\\System Volume Information',
|
||||
'C:\\$Recycle.Bin'
|
||||
];
|
||||
|
||||
/**
|
||||
* Validates that a path is safe for workspace operations
|
||||
* @param {string} requestedPath - The path to validate
|
||||
* @returns {Promise<{valid: boolean, resolvedPath?: string, error?: string}>}
|
||||
*/
|
||||
export async function validateWorkspacePath(requestedPath) {
|
||||
try {
|
||||
// Resolve to absolute path
|
||||
let absolutePath = path.resolve(requestedPath);
|
||||
|
||||
// Check if path is a forbidden system directory
|
||||
const normalizedPath = path.normalize(absolutePath);
|
||||
if (FORBIDDEN_PATHS.includes(normalizedPath) || normalizedPath === '/') {
|
||||
return {
|
||||
valid: false,
|
||||
error: 'Cannot use system-critical directories as workspace locations'
|
||||
};
|
||||
}
|
||||
|
||||
// Additional check for paths starting with forbidden directories
|
||||
for (const forbidden of FORBIDDEN_PATHS) {
|
||||
if (normalizedPath === forbidden ||
|
||||
normalizedPath.startsWith(forbidden + path.sep)) {
|
||||
// Exception: /var/tmp and similar user-accessible paths might be allowed
|
||||
// but /var itself and most /var subdirectories should be blocked
|
||||
if (forbidden === '/var' &&
|
||||
(normalizedPath.startsWith('/var/tmp') ||
|
||||
normalizedPath.startsWith('/var/folders'))) {
|
||||
continue; // Allow these specific cases
|
||||
}
|
||||
|
||||
return {
|
||||
valid: false,
|
||||
error: `Cannot create workspace in system directory: ${forbidden}`
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// Try to resolve the real path (following symlinks)
|
||||
let realPath;
|
||||
try {
|
||||
// Check if path exists to resolve real path
|
||||
await fs.access(absolutePath);
|
||||
realPath = await fs.realpath(absolutePath);
|
||||
} catch (error) {
|
||||
if (error.code === 'ENOENT') {
|
||||
// Path doesn't exist yet - check parent directory
|
||||
let parentPath = path.dirname(absolutePath);
|
||||
try {
|
||||
const parentRealPath = await fs.realpath(parentPath);
|
||||
|
||||
// Reconstruct the full path with real parent
|
||||
realPath = path.join(parentRealPath, path.basename(absolutePath));
|
||||
} catch (parentError) {
|
||||
if (parentError.code === 'ENOENT') {
|
||||
// Parent doesn't exist either - use the absolute path as-is
|
||||
// We'll validate it's within allowed root
|
||||
realPath = absolutePath;
|
||||
} else {
|
||||
throw parentError;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
// Resolve the workspace root to its real path
|
||||
const resolvedWorkspaceRoot = await fs.realpath(WORKSPACES_ROOT);
|
||||
|
||||
// Ensure the resolved path is contained within the allowed workspace root
|
||||
if (!realPath.startsWith(resolvedWorkspaceRoot + path.sep) &&
|
||||
realPath !== resolvedWorkspaceRoot) {
|
||||
return {
|
||||
valid: false,
|
||||
error: `Workspace path must be within the allowed workspace root: ${WORKSPACES_ROOT}`
|
||||
};
|
||||
}
|
||||
|
||||
// Additional symlink check for existing paths
|
||||
try {
|
||||
await fs.access(absolutePath);
|
||||
const stats = await fs.lstat(absolutePath);
|
||||
|
||||
if (stats.isSymbolicLink()) {
|
||||
// Verify symlink target is also within allowed root
|
||||
const linkTarget = await fs.readlink(absolutePath);
|
||||
const resolvedTarget = path.resolve(path.dirname(absolutePath), linkTarget);
|
||||
const realTarget = await fs.realpath(resolvedTarget);
|
||||
|
||||
if (!realTarget.startsWith(resolvedWorkspaceRoot + path.sep) &&
|
||||
realTarget !== resolvedWorkspaceRoot) {
|
||||
return {
|
||||
valid: false,
|
||||
error: 'Symlink target is outside the allowed workspace root'
|
||||
};
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
if (error.code !== 'ENOENT') {
|
||||
throw error;
|
||||
}
|
||||
// Path doesn't exist - that's fine for new workspace creation
|
||||
}
|
||||
|
||||
return {
|
||||
valid: true,
|
||||
resolvedPath: realPath
|
||||
};
|
||||
|
||||
} catch (error) {
|
||||
return {
|
||||
valid: false,
|
||||
error: `Path validation failed: ${error.message}`
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new workspace
|
||||
* POST /api/projects/create-workspace
|
||||
*
|
||||
* Body:
|
||||
* - workspaceType: 'existing' | 'new'
|
||||
* - path: string (workspace path)
|
||||
*/
|
||||
router.post('/create-workspace', async (req, res) => {
|
||||
try {
|
||||
const { workspaceType, path: workspacePath } = req.body;
|
||||
|
||||
// Validate required fields
|
||||
if (!workspaceType || !workspacePath) {
|
||||
return res.status(400).json({ error: 'workspaceType and path are required' });
|
||||
}
|
||||
|
||||
if (!['existing', 'new'].includes(workspaceType)) {
|
||||
return res.status(400).json({ error: 'workspaceType must be "existing" or "new"' });
|
||||
}
|
||||
|
||||
// Repository cloning is handled by /api/projects/clone-progress (SSE).
|
||||
if (req.body.githubUrl || req.body.githubTokenId || req.body.newGithubToken) {
|
||||
return res.status(400).json({
|
||||
error: 'Repository cloning is not supported on create-workspace',
|
||||
details: 'Use /api/projects/clone-progress for cloning workflows'
|
||||
});
|
||||
}
|
||||
|
||||
// Validate path safety before any operations
|
||||
const validation = await validateWorkspacePath(workspacePath);
|
||||
if (!validation.valid) {
|
||||
return res.status(400).json({
|
||||
error: 'Invalid workspace path',
|
||||
details: validation.error
|
||||
});
|
||||
}
|
||||
|
||||
const absolutePath = validation.resolvedPath;
|
||||
|
||||
// Handle existing workspace
|
||||
if (workspaceType === 'existing') {
|
||||
// Check if the path exists
|
||||
try {
|
||||
await fs.access(absolutePath);
|
||||
const stats = await fs.stat(absolutePath);
|
||||
|
||||
if (!stats.isDirectory()) {
|
||||
return res.status(400).json({ error: 'Path exists but is not a directory' });
|
||||
}
|
||||
} catch (error) {
|
||||
if (error.code === 'ENOENT') {
|
||||
return res.status(404).json({ error: 'Workspace path does not exist' });
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
|
||||
// Add the existing workspace to the project list
|
||||
const project = await addProjectManually(absolutePath);
|
||||
|
||||
return res.json({
|
||||
success: true,
|
||||
project,
|
||||
message: 'Existing workspace added successfully'
|
||||
});
|
||||
}
|
||||
|
||||
// Handle new workspace creation
|
||||
if (workspaceType === 'new') {
|
||||
// Create the directory if it doesn't exist
|
||||
await fs.mkdir(absolutePath, { recursive: true });
|
||||
|
||||
// Add the new workspace to the project list (no clone)
|
||||
const project = await addProjectManually(absolutePath);
|
||||
|
||||
return res.json({
|
||||
success: true,
|
||||
project,
|
||||
message: 'New workspace created successfully'
|
||||
});
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
console.error('Error creating workspace:', error);
|
||||
res.status(500).json({
|
||||
error: error.message || 'Failed to create workspace',
|
||||
details: process.env.NODE_ENV === 'development' ? error.stack : undefined
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Helper function to get GitHub token from database
|
||||
*/
|
||||
async function getGithubTokenById(tokenId, userId) {
|
||||
return githubTokensDb.getGithubTokenById(userId, tokenId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Clone repository with progress streaming (SSE)
|
||||
* GET /api/projects/clone-progress
|
||||
*/
|
||||
router.get('/clone-progress', async (req, res) => {
|
||||
const { path: workspacePath, githubUrl, githubTokenId, newGithubToken } = req.query;
|
||||
|
||||
res.setHeader('Content-Type', 'text/event-stream');
|
||||
res.setHeader('Cache-Control', 'no-cache');
|
||||
res.setHeader('Connection', 'keep-alive');
|
||||
res.flushHeaders();
|
||||
|
||||
const sendEvent = (type, data) => {
|
||||
res.write(`data: ${JSON.stringify({ type, ...data })}\n\n`);
|
||||
};
|
||||
|
||||
try {
|
||||
if (!workspacePath || !githubUrl) {
|
||||
sendEvent('error', { message: 'workspacePath and githubUrl are required' });
|
||||
res.end();
|
||||
return;
|
||||
}
|
||||
|
||||
const validation = await validateWorkspacePath(workspacePath);
|
||||
if (!validation.valid) {
|
||||
sendEvent('error', { message: validation.error });
|
||||
res.end();
|
||||
return;
|
||||
}
|
||||
|
||||
const absolutePath = validation.resolvedPath;
|
||||
|
||||
await fs.mkdir(absolutePath, { recursive: true });
|
||||
|
||||
let githubToken = null;
|
||||
if (githubTokenId) {
|
||||
const token = await getGithubTokenById(parseInt(githubTokenId), req.user.id);
|
||||
if (!token) {
|
||||
await fs.rm(absolutePath, { recursive: true, force: true });
|
||||
sendEvent('error', { message: 'GitHub token not found' });
|
||||
res.end();
|
||||
return;
|
||||
}
|
||||
githubToken = token.github_token;
|
||||
} else if (newGithubToken) {
|
||||
githubToken = newGithubToken;
|
||||
}
|
||||
|
||||
const normalizedUrl = githubUrl.replace(/\/+$/, '').replace(/\.git$/, '');
|
||||
const repoName = normalizedUrl.split('/').pop() || 'repository';
|
||||
const clonePath = path.join(absolutePath, repoName);
|
||||
|
||||
// Check if clone destination already exists to prevent data loss
|
||||
try {
|
||||
await fs.access(clonePath);
|
||||
sendEvent('error', { message: `Directory "${repoName}" already exists. Please choose a different location or remove the existing directory.` });
|
||||
res.end();
|
||||
return;
|
||||
} catch (err) {
|
||||
// Directory doesn't exist, which is what we want
|
||||
}
|
||||
|
||||
let cloneUrl = githubUrl;
|
||||
if (githubToken) {
|
||||
try {
|
||||
const url = new URL(githubUrl);
|
||||
url.username = githubToken;
|
||||
url.password = '';
|
||||
cloneUrl = url.toString();
|
||||
} catch (error) {
|
||||
// SSH URL or invalid - use as-is
|
||||
}
|
||||
}
|
||||
|
||||
sendEvent('progress', { message: `Cloning into '${repoName}'...` });
|
||||
|
||||
const gitProcess = spawn('git', ['clone', '--progress', cloneUrl, clonePath], {
|
||||
stdio: ['ignore', 'pipe', 'pipe'],
|
||||
env: {
|
||||
...process.env,
|
||||
GIT_TERMINAL_PROMPT: '0'
|
||||
}
|
||||
});
|
||||
|
||||
let lastError = '';
|
||||
|
||||
gitProcess.stdout.on('data', (data) => {
|
||||
const message = data.toString().trim();
|
||||
if (message) {
|
||||
sendEvent('progress', { message });
|
||||
}
|
||||
});
|
||||
|
||||
gitProcess.stderr.on('data', (data) => {
|
||||
const message = data.toString().trim();
|
||||
lastError = message;
|
||||
if (message) {
|
||||
sendEvent('progress', { message });
|
||||
}
|
||||
});
|
||||
|
||||
gitProcess.on('close', async (code) => {
|
||||
if (code === 0) {
|
||||
try {
|
||||
const project = await addProjectManually(clonePath);
|
||||
sendEvent('complete', { project, message: 'Repository cloned successfully' });
|
||||
} catch (error) {
|
||||
sendEvent('error', { message: `Clone succeeded but failed to add project: ${error.message}` });
|
||||
}
|
||||
} else {
|
||||
const sanitizedError = sanitizeGitError(lastError, githubToken);
|
||||
let errorMessage = 'Git clone failed';
|
||||
if (lastError.includes('Authentication failed') || lastError.includes('could not read Username')) {
|
||||
errorMessage = 'Authentication failed. Please check your credentials.';
|
||||
} else if (lastError.includes('Repository not found')) {
|
||||
errorMessage = 'Repository not found. Please check the URL and ensure you have access.';
|
||||
} else if (lastError.includes('already exists')) {
|
||||
errorMessage = 'Directory already exists';
|
||||
} else if (sanitizedError) {
|
||||
errorMessage = sanitizedError;
|
||||
}
|
||||
try {
|
||||
await fs.rm(clonePath, { recursive: true, force: true });
|
||||
} catch (cleanupError) {
|
||||
console.error('Failed to clean up after clone failure:', sanitizeGitError(cleanupError.message, githubToken));
|
||||
}
|
||||
sendEvent('error', { message: errorMessage });
|
||||
}
|
||||
res.end();
|
||||
});
|
||||
|
||||
gitProcess.on('error', (error) => {
|
||||
if (error.code === 'ENOENT') {
|
||||
sendEvent('error', { message: 'Git is not installed or not in PATH' });
|
||||
} else {
|
||||
sendEvent('error', { message: error.message });
|
||||
}
|
||||
res.end();
|
||||
});
|
||||
|
||||
req.on('close', () => {
|
||||
gitProcess.kill();
|
||||
});
|
||||
|
||||
} catch (error) {
|
||||
sendEvent('error', { message: error.message });
|
||||
res.end();
|
||||
}
|
||||
});
|
||||
|
||||
export default router;
|
||||
@@ -275,3 +275,54 @@ export type CreateCredentialResult = {
|
||||
credentialName: string;
|
||||
credentialType: string;
|
||||
};
|
||||
|
||||
// ---------------------------
|
||||
//----------------- PROJECT PERSISTENCE TYPES ------------
|
||||
/**
|
||||
* Canonical project row shape returned by the projects repository.
|
||||
*
|
||||
* Use this type whenever backend services need to pass around one database
|
||||
* project record without leaking raw SQL row typing across modules.
|
||||
*/
|
||||
export type ProjectRepositoryRow = {
|
||||
project_id: string;
|
||||
project_path: string;
|
||||
custom_project_name: string | null;
|
||||
isStarred: number;
|
||||
isArchived: number;
|
||||
};
|
||||
|
||||
/**
|
||||
* Result category returned by `projectsDb.createProjectPath`.
|
||||
*
|
||||
* `created` means a fresh row was inserted, `reactivated_archived` means an
|
||||
* existing archived path was accepted and updated, and `active_conflict` means
|
||||
* an already-active path blocked project creation.
|
||||
*/
|
||||
export type CreateProjectPathOutcome =
|
||||
| 'created'
|
||||
| 'reactivated_archived'
|
||||
| 'active_conflict';
|
||||
|
||||
/**
|
||||
* Structured result returned by project-path upsert operations.
|
||||
*
|
||||
* Services should use this result to decide whether a request succeeded,
|
||||
* should return a conflict, or needs follow-up retrieval of row metadata.
|
||||
*/
|
||||
export type CreateProjectPathResult = {
|
||||
outcome: CreateProjectPathOutcome;
|
||||
project: ProjectRepositoryRow | null;
|
||||
};
|
||||
|
||||
/**
|
||||
* Validation result for user-supplied workspace/project paths.
|
||||
*
|
||||
* `resolvedPath` is present only when validation succeeds. `error` is present
|
||||
* only when validation fails and is suitable for user-facing diagnostics.
|
||||
*/
|
||||
export type WorkspacePathValidationResult = {
|
||||
valid: boolean;
|
||||
resolvedPath?: string;
|
||||
error?: string;
|
||||
};
|
||||
|
||||
@@ -1,6 +1,17 @@
|
||||
import { randomUUID } from 'node:crypto';
|
||||
import fs from 'node:fs';
|
||||
import { mkdir, readFile, readdir, stat, writeFile } from 'node:fs/promises';
|
||||
import {
|
||||
access,
|
||||
lstat,
|
||||
mkdir,
|
||||
readFile,
|
||||
readdir,
|
||||
readlink,
|
||||
realpath,
|
||||
stat,
|
||||
writeFile,
|
||||
} from 'node:fs/promises';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
import readline from 'node:readline';
|
||||
|
||||
@@ -11,6 +22,7 @@ import type {
|
||||
ApiSuccessShape,
|
||||
AppErrorOptions,
|
||||
NormalizedMessage,
|
||||
WorkspacePathValidationResult,
|
||||
} from '@/shared/types.js';
|
||||
|
||||
//----------------- NORMALIZED MESSAGE HELPER INPUT TYPES ------------
|
||||
@@ -83,6 +95,154 @@ export class AppError extends Error {
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------
|
||||
//----------------- WORKSPACE PATH VALIDATION UTILITIES ------------
|
||||
/**
|
||||
* Root directory that all workspace/project paths must stay under.
|
||||
*
|
||||
* This is resolved from `WORKSPACES_ROOT` when configured; otherwise it falls
|
||||
* back to the current user's home directory.
|
||||
*/
|
||||
export const WORKSPACES_ROOT = process.env.WORKSPACES_ROOT || os.homedir();
|
||||
|
||||
/**
|
||||
* System-critical paths that must never be used as workspace roots.
|
||||
*
|
||||
* The validation helper blocks these values directly and also blocks paths
|
||||
* nested under them (with explicit allow-list exceptions where necessary).
|
||||
*/
|
||||
export const FORBIDDEN_WORKSPACE_PATHS = [
|
||||
// Unix
|
||||
'/',
|
||||
'/etc',
|
||||
'/bin',
|
||||
'/sbin',
|
||||
'/usr',
|
||||
'/dev',
|
||||
'/proc',
|
||||
'/sys',
|
||||
'/var',
|
||||
'/boot',
|
||||
'/root',
|
||||
'/lib',
|
||||
'/lib64',
|
||||
'/opt',
|
||||
'/tmp',
|
||||
'/run',
|
||||
// Windows
|
||||
'C:\\Windows',
|
||||
'C:\\Program Files',
|
||||
'C:\\Program Files (x86)',
|
||||
'C:\\ProgramData',
|
||||
'C:\\System Volume Information',
|
||||
'C:\\$Recycle.Bin',
|
||||
];
|
||||
|
||||
/**
|
||||
* Validates that a user-supplied workspace path is safe to use.
|
||||
*
|
||||
* Call this before any filesystem mutation that creates or registers projects.
|
||||
* The function resolves symlinks, enforces `WORKSPACES_ROOT` containment, and
|
||||
* blocks known system directories.
|
||||
*/
|
||||
export async function validateWorkspacePath(requestedPath: string): Promise<WorkspacePathValidationResult> {
|
||||
try {
|
||||
const absolutePath = path.resolve(requestedPath);
|
||||
const normalizedPath = path.normalize(absolutePath);
|
||||
|
||||
if (FORBIDDEN_WORKSPACE_PATHS.includes(normalizedPath) || normalizedPath === '/') {
|
||||
return {
|
||||
valid: false,
|
||||
error: 'Cannot use system-critical directories as workspace locations',
|
||||
};
|
||||
}
|
||||
|
||||
for (const forbiddenPath of FORBIDDEN_WORKSPACE_PATHS) {
|
||||
if (normalizedPath === forbiddenPath || normalizedPath.startsWith(`${forbiddenPath}${path.sep}`)) {
|
||||
// Allow specific user-writable folders under /var.
|
||||
if (
|
||||
forbiddenPath === '/var'
|
||||
&& (normalizedPath.startsWith('/var/tmp') || normalizedPath.startsWith('/var/folders'))
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
|
||||
return {
|
||||
valid: false,
|
||||
error: `Cannot create workspace in system directory: ${forbiddenPath}`,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
let resolvedPath = absolutePath;
|
||||
try {
|
||||
await access(absolutePath);
|
||||
resolvedPath = await realpath(absolutePath);
|
||||
} catch (error) {
|
||||
const fileError = error as NodeJS.ErrnoException;
|
||||
if (fileError.code !== 'ENOENT') {
|
||||
throw fileError;
|
||||
}
|
||||
|
||||
const parentPath = path.dirname(absolutePath);
|
||||
try {
|
||||
const parentRealPath = await realpath(parentPath);
|
||||
resolvedPath = path.join(parentRealPath, path.basename(absolutePath));
|
||||
} catch (parentError) {
|
||||
const parentFileError = parentError as NodeJS.ErrnoException;
|
||||
if (parentFileError.code !== 'ENOENT') {
|
||||
throw parentFileError;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const resolvedWorkspaceRoot = await realpath(WORKSPACES_ROOT);
|
||||
if (
|
||||
!resolvedPath.startsWith(`${resolvedWorkspaceRoot}${path.sep}`)
|
||||
&& resolvedPath !== resolvedWorkspaceRoot
|
||||
) {
|
||||
return {
|
||||
valid: false,
|
||||
error: `Workspace path must be within the allowed workspace root: ${WORKSPACES_ROOT}`,
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
await access(absolutePath);
|
||||
const pathStats = await lstat(absolutePath);
|
||||
if (pathStats.isSymbolicLink()) {
|
||||
const symlinkTarget = await readlink(absolutePath);
|
||||
const resolvedSymlinkPath = path.resolve(path.dirname(absolutePath), symlinkTarget);
|
||||
const realSymlinkPath = await realpath(resolvedSymlinkPath);
|
||||
if (
|
||||
!realSymlinkPath.startsWith(`${resolvedWorkspaceRoot}${path.sep}`)
|
||||
&& realSymlinkPath !== resolvedWorkspaceRoot
|
||||
) {
|
||||
return {
|
||||
valid: false,
|
||||
error: 'Symlink target is outside the allowed workspace root',
|
||||
};
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
const fileError = error as NodeJS.ErrnoException;
|
||||
if (fileError.code !== 'ENOENT') {
|
||||
throw fileError;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
valid: true,
|
||||
resolvedPath,
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
valid: false,
|
||||
error: `Path validation failed: ${(error as Error).message}`,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------
|
||||
//----------------- NORMALIZED PROVIDER MESSAGE UTILITIES ------------
|
||||
/**
|
||||
|
||||
Reference in New Issue
Block a user