import { randomUUID } from 'node:crypto';
import fs from 'node:fs';
import {
  access,
  lstat,
  mkdir,
  readFile,
  readdir,
  readlink,
  realpath,
  stat,
  writeFile,
} from 'node:fs/promises';
import os from 'node:os';
import path from 'node:path';
import readline from 'node:readline';

import type { NextFunction, Request, RequestHandler, Response } from 'express';

import type {
  AnyRecord,
  ApiSuccessShape,
  AppErrorOptions,
  NormalizedMessage,
  WorkspacePathValidationResult,
} from '@/shared/types.js';

//----------------- NORMALIZED MESSAGE HELPER INPUT TYPES ------------
/**
 * Input payload accepted by `createNormalizedMessage`.
 *
 * Callers provide provider-specific fields plus the required `kind`/`provider`
 * pair; this helper fills missing envelope fields (`id`, `sessionId`,
 * `timestamp`) in a consistent way.
 */
type NormalizedMessageInput =
  {
    kind: NormalizedMessage['kind'];
    provider: NormalizedMessage['provider'];
    id?: string | null;
    sessionId?: string | null;
    timestamp?: string | null;
  } & Record<string, unknown>;

// ---------------------------
//----------------- HTTP HANDLER UTILITIES ------------
/**
 * Wraps arbitrary data in the standard API success envelope.
 *
 * Use this helper in route handlers to keep successful JSON responses consistent
 * across endpoints.
 */
export function createApiSuccessResponse<TData>(
  data: TData,
): ApiSuccessShape<TData> {
  return {
    success: true,
    data,
  };
}

/**
 * Converts an async Express handler into a standard `RequestHandler` and routes
 * rejected promises to Express error middleware.
 *
 * Use this to avoid repeating `try/catch(next)` in every async route.
 */
export function asyncHandler(
  handler: (req: Request, res: Response, next: NextFunction) => Promise<unknown>
): RequestHandler {
  return (req, res, next) => {
    void Promise.resolve(handler(req, res, next)).catch(next);
  };
}

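// Usage sketch combining both helpers. `router` and `loadProject` are
// hypothetical names used for illustration, not exports of this module:
//
//   router.get('/api/projects/:id', asyncHandler(async (req, res) => {
//     const project = await loadProject(req.params.id);
//     res.json(createApiSuccessResponse(project));
//   }));
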
// ---------------------------
//----------------- SHARED ERROR UTILITIES ------------
/**
 * Shared application error carrying an HTTP status code and a machine-readable
 * error code as metadata.
 *
 * Throw this from service/route layers when the caller should receive a
 * controlled error response rather than a generic 500.
 */
export class AppError extends Error {
  readonly code: string;
  readonly statusCode: number;
  readonly details?: unknown;

  constructor(message: string, options: AppErrorOptions = {}) {
    super(message);
    this.name = 'AppError';
    this.code = options.code ?? 'INTERNAL_ERROR';
    this.statusCode = options.statusCode ?? 500;
    this.details = options.details;
  }
}

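// Usage sketch (the NOT_FOUND code shown is illustrative; codes are free-form
// strings carried through to the error middleware):
//
//   if (!project) {
//     throw new AppError('Project not found', { statusCode: 404, code: 'NOT_FOUND' });
//   }
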
// ---------------------------
//----------------- WORKSPACE PATH VALIDATION UTILITIES ------------
/**
 * Root directory that all workspace/project paths must stay under.
 *
 * Taken from the `WORKSPACES_ROOT` environment variable when set; otherwise it
 * falls back to the current user's home directory.
 */
export const WORKSPACES_ROOT = process.env.WORKSPACES_ROOT || os.homedir();

/**
 * System-critical paths that must never be used as workspace roots.
 *
 * The validation helper blocks these values directly and also blocks paths
 * nested under them (with explicit allow-list exceptions where necessary).
 */
export const FORBIDDEN_WORKSPACE_PATHS = [
  // Unix
  '/',
  '/etc',
  '/bin',
  '/sbin',
  '/usr',
  '/dev',
  '/proc',
  '/sys',
  '/var',
  '/boot',
  '/root',
  '/lib',
  '/lib64',
  '/opt',
  '/tmp',
  '/run',
  // Windows
  'C:\\Windows',
  'C:\\Program Files',
  'C:\\Program Files (x86)',
  'C:\\ProgramData',
  'C:\\System Volume Information',
  'C:\\$Recycle.Bin',
];

function stripWindowsLongPathPrefix(inputPath: string): string {
  if (inputPath.startsWith('\\\\?\\UNC\\')) {
    return `\\\\${inputPath.slice('\\\\?\\UNC\\'.length)}`;
  }

  if (inputPath.startsWith('\\\\?\\')) {
    return inputPath.slice('\\\\?\\'.length);
  }

  return inputPath;
}

function shouldUseWindowsPathNormalization(inputPath: string): boolean {
  if (process.platform === 'win32') {
    return true;
  }

  return inputPath.startsWith('\\\\') || /^[a-zA-Z]:([\\/]|$)/.test(inputPath);
}

/**
 * Canonicalizes project/workspace paths for stable DB keys and comparisons.
 *
 * Normalization rules:
 * - trim whitespace
 * - strip Windows long-path prefixes (`\\?\` and `\\?\UNC\`)
 * - normalize path separators and dot segments
 * - trim trailing separators except for filesystem roots
 */
export function normalizeProjectPath(inputPath: string): string {
  if (typeof inputPath !== 'string') {
    return '';
  }

  const trimmed = inputPath.trim();
  if (!trimmed) {
    return '';
  }

  const withoutLongPrefix = stripWindowsLongPathPrefix(trimmed);
  const useWindowsPathRules = shouldUseWindowsPathNormalization(withoutLongPrefix);
  const normalized = useWindowsPathRules
    ? path.win32.normalize(withoutLongPrefix)
    : path.posix.normalize(withoutLongPrefix);

  if (!normalized) {
    return '';
  }

  const parser = useWindowsPathRules ? path.win32 : path.posix;
  const root = parser.parse(normalized).root;
  if (normalized === root) {
    return normalized;
  }

  return normalized.replace(/[\\/]+$/, '');
}

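// Behavior sketch (arguments shown as TypeScript string literals):
//
//   normalizeProjectPath('  /home/user/project//  ')  -> '/home/user/project'
//   normalizeProjectPath('\\\\?\\C:\\repo\\')         -> 'C:\\repo'
//   normalizeProjectPath('/')                         -> '/'  (roots keep their separator)
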
/**
 * Validates that a user-supplied workspace path is safe to use.
 *
 * Call this before any filesystem mutation that creates or registers projects.
 * The function resolves symlinks, enforces `WORKSPACES_ROOT` containment, and
 * blocks known system directories.
 */
export async function validateWorkspacePath(requestedPath: string): Promise<WorkspacePathValidationResult> {
  try {
    const normalizedRequestedPath = normalizeProjectPath(requestedPath);
    if (!normalizedRequestedPath) {
      return {
        valid: false,
        error: 'Workspace path is required',
      };
    }

    const absolutePath = path.resolve(normalizedRequestedPath);
    const normalizedPath = normalizeProjectPath(absolutePath);

    if (FORBIDDEN_WORKSPACE_PATHS.includes(normalizedPath) || normalizedPath === '/') {
      return {
        valid: false,
        error: 'Cannot use system-critical directories as workspace locations',
      };
    }

    for (const forbiddenPath of FORBIDDEN_WORKSPACE_PATHS) {
      const normalizedForbiddenPath = normalizeProjectPath(forbiddenPath);
      if (
        normalizedPath === normalizedForbiddenPath
        || normalizedPath.startsWith(`${normalizedForbiddenPath}${path.sep}`)
      ) {
        // Allow specific user-writable folders under /var.
        if (
          normalizedForbiddenPath === '/var'
          && (normalizedPath.startsWith('/var/tmp') || normalizedPath.startsWith('/var/folders'))
        ) {
          continue;
        }

        return {
          valid: false,
          error: `Cannot create workspace in system directory: ${forbiddenPath}`,
        };
      }
    }

    let resolvedPath = normalizeProjectPath(absolutePath);
    try {
      await access(absolutePath);
      resolvedPath = normalizeProjectPath(await realpath(absolutePath));
    } catch (error) {
      const fileError = error as NodeJS.ErrnoException;
      if (fileError.code !== 'ENOENT') {
        throw fileError;
      }

      const parentPath = path.dirname(absolutePath);
      try {
        const parentRealPath = await realpath(parentPath);
        resolvedPath = normalizeProjectPath(path.join(parentRealPath, path.basename(absolutePath)));
      } catch (parentError) {
        const parentFileError = parentError as NodeJS.ErrnoException;
        if (parentFileError.code !== 'ENOENT') {
          throw parentFileError;
        }
      }
    }

    const resolvedWorkspaceRoot = normalizeProjectPath(await realpath(WORKSPACES_ROOT));
    if (
      !resolvedPath.startsWith(`${resolvedWorkspaceRoot}${path.sep}`)
      && resolvedPath !== resolvedWorkspaceRoot
    ) {
      return {
        valid: false,
        error: `Workspace path must be within the allowed workspace root: ${WORKSPACES_ROOT}`,
      };
    }

    try {
      await access(absolutePath);
      const pathStats = await lstat(absolutePath);
      if (pathStats.isSymbolicLink()) {
        const symlinkTarget = await readlink(absolutePath);
        const resolvedSymlinkPath = path.resolve(path.dirname(absolutePath), symlinkTarget);
        const realSymlinkPath = await realpath(resolvedSymlinkPath);
        if (
          !realSymlinkPath.startsWith(`${resolvedWorkspaceRoot}${path.sep}`)
          && realSymlinkPath !== resolvedWorkspaceRoot
        ) {
          return {
            valid: false,
            error: 'Symlink target is outside the allowed workspace root',
          };
        }
      }
    } catch (error) {
      const fileError = error as NodeJS.ErrnoException;
      if (fileError.code !== 'ENOENT') {
        throw fileError;
      }
    }

    return {
      valid: true,
      resolvedPath,
    };
  } catch (error) {
    return {
      valid: false,
      error: `Path validation failed: ${(error as Error).message}`,
    };
  }
}

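// Usage sketch for a hypothetical "create project" flow (the
// INVALID_WORKSPACE_PATH code is illustrative):
//
//   const result = await validateWorkspacePath(req.body.path);
//   if (!result.valid) {
//     throw new AppError(result.error ?? 'Invalid workspace path', {
//       statusCode: 400,
//       code: 'INVALID_WORKSPACE_PATH',
//     });
//   }
//   await mkdir(result.resolvedPath!, { recursive: true });
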
// ---------------------------
//----------------- NORMALIZED PROVIDER MESSAGE UTILITIES ------------
/**
 * Generates a stable unique id for normalized provider messages.
 */
export function generateMessageId(prefix = 'msg'): string {
  return `${prefix}_${randomUUID()}`;
}

/**
 * Creates a normalized provider message and fills the shared envelope fields.
 *
 * Provider adapters and live SDK handlers pass through provider-specific fields,
 * while this helper guarantees every emitted event has an id, session id,
 * timestamp, and provider marker.
 */
export function createNormalizedMessage(fields: NormalizedMessageInput): NormalizedMessage {
  return {
    ...fields,
    id: fields.id || generateMessageId(fields.kind),
    sessionId: fields.sessionId || '',
    timestamp: fields.timestamp || new Date().toISOString(),
    provider: fields.provider,
  };
}

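// Usage sketch. The `kind`/`provider` values and the extra `text` field are
// illustrative; the allowed unions come from `NormalizedMessage` in shared types:
//
//   const message = createNormalizedMessage({
//     kind: 'assistant',
//     provider: 'claude',
//     sessionId: 'session-123',
//     text: 'Hello!',
//   });
//   // message.id is e.g. 'assistant_6f9a...'; message.timestamp is an ISO string.
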
// ---------------------------
//----------------- MCP CONFIG PARSING UTILITIES ------------
/**
 * Safely narrows an unknown value to a plain object record.
 *
 * This deliberately rejects arrays, `null`, and primitive values so callers can
 * treat the returned value as a JSON-style object map without repeating the same
 * defensive shape checks at every config read site.
 */
export const readObjectRecord = (value: unknown): AnyRecord | null => {
  if (!value || typeof value !== 'object' || Array.isArray(value)) {
    return null;
  }

  return value as AnyRecord;
};

/**
 * Reads an optional string from unknown input and normalizes empty or
 * whitespace-only values to `undefined`.
 *
 * This is useful when parsing config files where a field may be missing, present
 * with the wrong type, or present as an empty string that should be treated as
 * "not configured".
 */
export const readOptionalString = (value: unknown): string | undefined => {
  if (typeof value !== 'string') {
    return undefined;
  }

  const normalized = value.trim();
  return normalized.length > 0 ? normalized : undefined;
};

/**
 * Reads an optional string array from unknown input.
 *
 * Non-array values are ignored, and any array entries that are not strings are
 * filtered out. This lets provider config readers consume loosely shaped JSON/TOML
 * data without failing on incidental invalid members.
 */
export const readStringArray = (value: unknown): string[] | undefined => {
  if (!Array.isArray(value)) {
    return undefined;
  }

  return value.filter((entry): entry is string => typeof entry === 'string');
};

/**
 * Reads an optional string-to-string map from unknown input.
 *
 * The function first ensures the source value is a plain object, then keeps only
 * keys whose values are strings. If no valid entries remain, it returns `undefined`
 * so callers can fall back to defaults instead of handling an empty map.
 */
export const readStringRecord = (value: unknown): Record<string, string> | undefined => {
  const record = readObjectRecord(value);
  if (!record) {
    return undefined;
  }

  const normalized: Record<string, string> = {};
  for (const [key, entry] of Object.entries(record)) {
    if (typeof entry === 'string') {
      normalized[key] = entry;
    }
  }

  return Object.keys(normalized).length > 0 ? normalized : undefined;
};

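// Usage sketch against a loosely shaped config (the `mcpServers`/`myServer`
// layout is illustrative, not a schema defined in this module):
//
//   const servers = readObjectRecord(config.mcpServers);
//   const server = readObjectRecord(servers?.myServer);
//   const command = readOptionalString(server?.command); // e.g. 'npx', or undefined
//   const args = readStringArray(server?.args);          // string entries only
//   const env = readStringRecord(server?.env);           // string values only
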
// ---------------------------
//----------------- WEBSOCKET PAYLOAD PARSING UTILITIES ------------
/**
 * Parses one websocket message payload into a plain JSON object record.
 *
 * Use this in realtime handlers that receive raw websocket payloads as `string`,
 * `Buffer`, `ArrayBuffer`, or chunk arrays. The helper converts supported
 * payload formats to UTF-8 text, parses JSON, and returns only object payloads.
 * Primitive/array/invalid payloads return `null` so callers can handle bad input
 * without throwing from deeply nested message handlers.
 */
export const parseIncomingJsonObject = (payload: unknown): AnyRecord | null => {
  let text: string | null = null;

  if (typeof payload === 'string') {
    text = payload;
  } else if (Buffer.isBuffer(payload)) {
    text = payload.toString('utf8');
  } else if (payload instanceof ArrayBuffer) {
    text = Buffer.from(payload).toString('utf8');
  } else if (Array.isArray(payload)) {
    const buffers = payload
      .map((entry) => {
        if (Buffer.isBuffer(entry)) {
          return entry;
        }

        if (entry instanceof ArrayBuffer) {
          return Buffer.from(entry);
        }

        if (ArrayBuffer.isView(entry)) {
          return Buffer.from(entry.buffer, entry.byteOffset, entry.byteLength);
        }

        return null;
      })
      .filter((entry): entry is Buffer => entry !== null);

    if (buffers.length > 0) {
      text = Buffer.concat(buffers).toString('utf8');
    }
  }

  if (typeof text !== 'string' || text.trim().length === 0) {
    return null;
  }

  try {
    const parsed = JSON.parse(text) as unknown;
    return readObjectRecord(parsed);
  } catch {
    return null;
  }
};

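// Usage sketch inside a websocket connection handler (the `socket` wiring is
// assumed to exist elsewhere):
//
//   socket.on('message', (raw) => {
//     const payload = parseIncomingJsonObject(raw);
//     if (!payload) {
//       return; // non-JSON, non-object, or empty frame
//     }
//     // ...dispatch on payload.type, payload.sessionId, etc.
//   });
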
// ---------------------------
//----------------- JSON CONFIG FILE I/O UTILITIES ------------
/**
 * Reads a JSON config file and guarantees a plain object result.
 *
 * Missing files are treated as an empty config object so provider-specific MCP
 * readers can operate against first-run environments without special-case file
 * existence checks. If the file exists but contains invalid JSON, the parse error
 * is preserved and rethrown.
 */
export const readJsonConfig = async (filePath: string): Promise<Record<string, unknown>> => {
  try {
    const content = await readFile(filePath, 'utf8');
    const parsed = JSON.parse(content) as Record<string, unknown>;
    return readObjectRecord(parsed) ?? {};
  } catch (error) {
    const code = (error as NodeJS.ErrnoException).code;
    if (code === 'ENOENT') {
      return {};
    }

    throw error;
  }
};

/**
 * Writes a JSON config file with stable, human-readable formatting.
 *
 * The parent directory is created automatically so callers can persist config into
 * provider-specific folders without pre-creating the directory tree. Output always
 * ends with a trailing newline to keep the file diff-friendly.
 */
export const writeJsonConfig = async (filePath: string, data: Record<string, unknown>): Promise<void> => {
  await mkdir(path.dirname(filePath), { recursive: true });
  await writeFile(filePath, `${JSON.stringify(data, null, 2)}\n`, 'utf8');
};

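// Usage sketch: a read-modify-write round trip (the config path and
// `lastSyncedAt` key are illustrative):
//
//   const configPath = path.join(os.homedir(), '.myprovider', 'mcp.json');
//   const config = await readJsonConfig(configPath); // {} when the file is absent
//   config.lastSyncedAt = new Date().toISOString();
//   await writeJsonConfig(configPath, config);       // creates the folder if needed
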
// ---------------------------
//----------------- SESSION SYNCHRONIZER TITLE HELPERS ------------
/**
 * Produces a compact session title suitable for UI rendering and DB storage.
 *
 * Use this when converting provider-native names into a consistent title value.
 * The helper collapses repeated whitespace, trims the result, and truncates it
 * to 120 characters so every provider writes stable and bounded metadata.
 * If the normalized input is empty, it returns the supplied fallback title.
 */
export function normalizeSessionName(rawValue: string | undefined, fallback: string): string {
  const normalized = (rawValue ?? '').replace(/\s+/g, ' ').trim();
  if (!normalized) {
    return fallback;
  }

  return normalized.slice(0, 120);
}

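// Behavior sketch:
//
//   normalizeSessionName('  Fix   login\n bug ', 'New Session') -> 'Fix login bug'
//   normalizeSessionName(undefined, 'New Session')              -> 'New Session'
//   normalizeSessionName('x'.repeat(200), 'New Session')        -> first 120 chars
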
// ---------------------------
//----------------- SESSION SYNCHRONIZER FILESYSTEM HELPERS ------------
/**
 * Recursively discovers files that match one extension, with optional
 * incremental filtering.
 *
 * Provider synchronizers call this to find transcript artifacts under provider
 * home directories. Pass `lastScanAt` to include only files created after the
 * previous scan, or pass `null` to perform a full rescan. Missing directories
 * are treated as empty because not every provider exists on every machine.
 */
export async function findFilesRecursivelyCreatedAfter(
  rootDir: string,
  extension: string,
  lastScanAt: Date | null,
  fileList: string[] = []
): Promise<string[]> {
  try {
    const entries = await readdir(rootDir, { withFileTypes: true });
    for (const entry of entries) {
      const fullPath = path.join(rootDir, entry.name);

      if (entry.isDirectory()) {
        await findFilesRecursivelyCreatedAfter(fullPath, extension, lastScanAt, fileList);
        continue;
      }

      if (!entry.isFile() || !entry.name.endsWith(extension)) {
        continue;
      }

      if (!lastScanAt) {
        fileList.push(fullPath);
        continue;
      }

      const fileStat = await stat(fullPath);
      if (fileStat.birthtime > lastScanAt) {
        fileList.push(fullPath);
      }
    }
  } catch {
    // Missing provider folders are expected in first-run or partial setups.
  }

  return fileList;
}

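// Usage sketch (the provider directory is illustrative; `lastScanAt` is a
// `Date` persisted from the previous sync run):
//
//   const transcriptsDir = path.join(os.homedir(), '.someprovider', 'sessions');
//   const allFiles = await findFilesRecursivelyCreatedAfter(transcriptsDir, '.jsonl', null);
//   const newFiles = await findFilesRecursivelyCreatedAfter(transcriptsDir, '.jsonl', lastScanAt);
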
/**
 * Reads file creation/update timestamps and maps them to DB-friendly ISO strings.
 *
 * Session indexers use this to persist `created_at` and `updated_at` metadata
 * when upserting sessions. If the file cannot be read, an empty object is
 * returned so indexing can continue for other files.
 */
export async function readFileTimestamps(
  filePath: string
): Promise<{ createdAt?: string; updatedAt?: string }> {
  try {
    const fileStat = await stat(filePath);
    return {
      createdAt: fileStat.birthtime.toISOString(),
      updatedAt: fileStat.mtime.toISOString(),
    };
  } catch {
    return {};
  }
}

// ---------------------------
//----------------- SESSION SYNCHRONIZER JSONL PARSING HELPERS ------------
/**
 * Builds a first-seen key/value lookup map from a JSONL file.
 *
 * Use this for provider index files where session id -> display name metadata
 * is stored line-by-line. The first value for each key wins, preserving the
 * earliest known label while avoiding repeated map overwrites.
 */
export async function buildLookupMap(
  filePath: string,
  keyField: string,
  valueField: string
): Promise<Map<string, string>> {
  const lookup = new Map<string, string>();

  try {
    const fileStream = fs.createReadStream(filePath);
    const lineReader = readline.createInterface({ input: fileStream, crlfDelay: Infinity });

    for await (const line of lineReader) {
      const trimmed = line.trim();
      if (!trimmed) {
        continue;
      }

      let parsed: Record<string, unknown>;
      try {
        parsed = JSON.parse(trimmed) as Record<string, unknown>;
      } catch {
        // Skip malformed lines so one bad entry cannot abort the rest of the file.
        continue;
      }

      const key = parsed[keyField];
      const value = parsed[valueField];

      if (typeof key === 'string' && typeof value === 'string' && !lookup.has(key)) {
        lookup.set(key, value);
      }
    }
  } catch {
    // Missing or unreadable lookup files should not block session sync.
  }

  return lookup;
}

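// Usage sketch (the index file and field names are illustrative of a provider
// index layout):
//
//   const titles = await buildLookupMap(
//     path.join(os.homedir(), '.someprovider', 'session-index.jsonl'),
//     'sessionId',
//     'displayName'
//   );
//   const title = titles.get('session-123') ?? 'New Session';
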
/**
 * Reads a JSONL file and returns the first extracted payload that matches
 * caller criteria.
 *
 * The caller supplies an `extractor` that validates provider-specific row
 * shapes. This helper centralizes line-by-line parsing and lets indexers stop
 * scanning as soon as one valid row is found.
 */
export async function extractFirstValidJsonlData<T>(
  filePath: string,
  extractor: (parsedJson: unknown) => T | null | undefined
): Promise<T | null> {
  try {
    const fileStream = fs.createReadStream(filePath);
    const lineReader = readline.createInterface({ input: fileStream, crlfDelay: Infinity });

    for await (const line of lineReader) {
      const trimmed = line.trim();
      if (!trimmed) {
        continue;
      }

      let parsed: unknown;
      try {
        parsed = JSON.parse(trimmed);
      } catch {
        // Skip malformed lines; a later row may still satisfy the extractor.
        continue;
      }

      const extracted = extractor(parsed);
      if (extracted) {
        lineReader.close();
        fileStream.close();
        return extracted;
      }
    }
  } catch {
    // Ignore missing or unreadable artifacts so full scans keep progressing.
  }

  return null;
}

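// Usage sketch: pull the first session id out of a transcript file (the row
// shape checked here is illustrative):
//
//   const sessionId = await extractFirstValidJsonlData(transcriptPath, (row) => {
//     const record = readObjectRecord(row);
//     const id = record?.['sessionId'];
//     return typeof id === 'string' ? id : null;
//   });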