Feat: [Beta] Merge Claude Code sessions in the sessions list when one is a continuation of another - workaround for https://github.com/anthropics/claude-code/issues/2354
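
The session-list code now treats a summary entry that has a leafUuid but no sessionId as a continuation marker: the leafUuid points at a message uuid from the earlier session (its leaf), so the earlier session can be hidden and its summary carried onto the newer one. Roughly, the relevant JSONL shape looks like this (illustrative only; ids, summary text, and timestamps are invented for the example):

{"type":"summary","summary":"Fix project list pagination","leafUuid":"uuid-a9"}
{"sessionId":"sess-b2","uuid":"uuid-b1","type":"user","message":{"role":"user","content":"continue"},"timestamp":"2025-09-11T14:00:00Z"}

Here uuid-a9 belongs to an earlier session, so that session is marked as continued and only the new one stays visible in the list.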

simos
2025-09-11 14:30:24 +00:00
parent 4ca78ba69a
commit 06bb5feb09
2 changed files with 74 additions and 39 deletions

View File

@@ -532,7 +532,7 @@ async function getSessions(projectName, limit = 5, offset = 0) {
     return { sessions: [], hasMore: false, total: 0 };
   }
 
-  // For performance, get file stats to sort by modification time
+  // Sort files by modification time (newest first)
   const filesWithStats = await Promise.all(
     jsonlFiles.map(async (file) => {
       const filePath = path.join(projectDir, file);
@@ -540,40 +540,84 @@ async function getSessions(projectName, limit = 5, offset = 0) {
       return { file, mtime: stats.mtime };
     })
   );
 
-  // Sort files by modification time (newest first) for better performance
   filesWithStats.sort((a, b) => b.mtime - a.mtime);
 
   const allSessions = new Map();
-  let processedCount = 0;
+  const allEntries = [];
+  const uuidToSessionMap = new Map();
 
-  // Process files in order of modification time
+  // Collect all sessions and entries from all files
   for (const { file } of filesWithStats) {
     const jsonlFile = path.join(projectDir, file);
-    const sessions = await parseJsonlSessions(jsonlFile);
+    const result = await parseJsonlSessions(jsonlFile);
 
-    // Merge sessions, avoiding duplicates by session ID
-    sessions.forEach(session => {
+    result.sessions.forEach(session => {
       if (!allSessions.has(session.id)) {
         allSessions.set(session.id, session);
       }
     });
 
-    processedCount++;
+    allEntries.push(...result.entries);
 
-    // Early exit optimization: if we have enough sessions and processed recent files
-    if (allSessions.size >= (limit + offset) * 2 && processedCount >= Math.min(3, filesWithStats.length)) {
+    // Early exit optimization for large projects
+    if (allSessions.size >= (limit + offset) * 2 && allEntries.length >= Math.min(3, filesWithStats.length)) {
       break;
     }
   }
 
-  // Convert to array and sort by last activity
-  const sortedSessions = Array.from(allSessions.values()).sort((a, b) =>
-    new Date(b.lastActivity) - new Date(a.lastActivity)
-  );
-
-  const total = sortedSessions.length;
-  const paginatedSessions = sortedSessions.slice(offset, offset + limit);
+  // Build UUID-to-session mapping for timeline detection
+  allEntries.forEach(entry => {
+    if (entry.uuid && entry.sessionId) {
+      uuidToSessionMap.set(entry.uuid, entry.sessionId);
+    }
+  });
+
+  // Detect session continuations using leafUuid
+  const sessionContinuations = new Map();
+  let pendingContinuationInfo = null;
+
+  allEntries.forEach(entry => {
+    // Summary entries without sessionId indicate a session continuation
+    if (entry.type === 'summary' && !entry.sessionId && (entry.leafUuid || entry.leafUUID)) {
+      pendingContinuationInfo = {
+        leafUuid: entry.leafUuid || entry.leafUUID,
+        summary: entry.summary || 'Continued Session'
+      };
+      return;
+    }
+
+    if (entry.sessionId) {
+      const session = allSessions.get(entry.sessionId);
+
+      // Apply pending continuation info
+      if (session && pendingContinuationInfo) {
+        const previousSession = uuidToSessionMap.get(pendingContinuationInfo.leafUuid);
+        if (previousSession) {
+          session.summary = pendingContinuationInfo.summary;
+          sessionContinuations.set(entry.sessionId, previousSession);
+        }
+        pendingContinuationInfo = null;
+      }
+
+      // Handle summary entries with sessionId that have leafUuid
+      if (entry.type === 'summary' && (entry.leafUuid || entry.leafUUID)) {
+        const leafUuid = entry.leafUuid || entry.leafUUID;
+        const previousSession = uuidToSessionMap.get(leafUuid);
+        if (previousSession && session) {
+          sessionContinuations.set(entry.sessionId, previousSession);
+        }
+      }
+    }
+  });
+
+  // Filter out continued sessions - only show the latest in each timeline
+  const continuedSessions = new Set(sessionContinuations.values());
+  const visibleSessions = Array.from(allSessions.values())
+    .filter(session => !continuedSessions.has(session.id))
+    .sort((a, b) => new Date(b.lastActivity) - new Date(a.lastActivity));
+
+  const total = visibleSessions.length;
+  const paginatedSessions = visibleSessions.slice(offset, offset + limit);
   const hasMore = offset + limit < total;
 
   return {
@@ -591,6 +635,7 @@ async function getSessions(projectName, limit = 5, offset = 0) {
 async function parseJsonlSessions(filePath) {
   const sessions = new Map();
+  const entries = [];
 
   try {
     const fileStream = fsSync.createReadStream(filePath);
@@ -599,14 +644,11 @@ async function parseJsonlSessions(filePath) {
       crlfDelay: Infinity
     });
 
-    // console.log(`[JSONL Parser] Reading file: ${filePath}`);
-    let lineCount = 0;
-
     for await (const line of rl) {
       if (line.trim()) {
-        lineCount++;
         try {
           const entry = JSON.parse(line);
+          entries.push(entry);
 
           if (entry.sessionId) {
             if (!sessions.has(entry.sessionId)) {
@@ -621,43 +663,37 @@ async function parseJsonlSessions(filePath) {
             const session = sessions.get(entry.sessionId);
 
-            // Update summary if this is a summary entry
+            // Update summary from summary entries or first user message
             if (entry.type === 'summary' && entry.summary) {
               session.summary = entry.summary;
             } else if (entry.message?.role === 'user' && entry.message?.content && session.summary === 'New Session') {
-              // Use first user message as summary if no summary entry exists
               const content = entry.message.content;
-              if (typeof content === 'string' && content.length > 0) {
-                // Skip command messages that start with <command-name>
-                if (!content.startsWith('<command-name>')) {
-                  session.summary = content.length > 50 ? content.substring(0, 50) + '...' : content;
-                }
+              if (typeof content === 'string' && content.length > 0 && !content.startsWith('<command-name>')) {
+                session.summary = content.length > 50 ? content.substring(0, 50) + '...' : content;
               }
             }
 
-            // Count messages instead of storing them all
-            session.messageCount = (session.messageCount || 0) + 1;
+            session.messageCount++;
 
-            // Update last activity
             if (entry.timestamp) {
               session.lastActivity = new Date(entry.timestamp);
             }
           }
         } catch (parseError) {
-          console.warn(`[JSONL Parser] Error parsing line ${lineCount}:`, parseError.message);
+          // Skip malformed lines silently
         }
       }
     }
 
-    // console.log(`[JSONL Parser] Processed ${lineCount} lines, found ${sessions.size} sessions`);
+    return {
+      sessions: Array.from(sessions.values()),
+      entries: entries
+    };
   } catch (error) {
     console.error('Error reading JSONL file:', error);
+    return { sessions: [], entries: [] };
   }
-
-  // Convert Map to Array and sort by last activity
-  return Array.from(sessions.values()).sort((a, b) =>
-    new Date(b.lastActivity) - new Date(a.lastActivity)
-  );
 }
 
 // Get messages for a specific session with pagination support
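
For readers following the new getSessions flow above, here is a minimal standalone sketch of the same filtering idea on toy data (not the project's code; entry shapes and ids are invented, and summary/pagination handling is omitted):

// Session 'A' ends at uuid 'u1'; a summary entry with leafUuid 'u1' and no sessionId
// marks that session 'B' continues it, so 'A' should be hidden from the list.
const entries = [
  { sessionId: 'A', uuid: 'u1', timestamp: '2025-09-11T10:00:00Z' },
  { type: 'summary', leafUuid: 'u1', summary: 'Continued Session' },
  { sessionId: 'B', uuid: 'u2', timestamp: '2025-09-11T11:00:00Z' }
];

// uuid -> sessionId, so a leafUuid can be resolved to the session it belongs to
const uuidToSession = new Map(
  entries.filter(e => e.uuid && e.sessionId).map(e => [e.uuid, e.sessionId])
);

const continued = new Set();
let pendingLeaf = null;
for (const e of entries) {
  if (e.type === 'summary' && !e.sessionId && e.leafUuid) {
    pendingLeaf = e.leafUuid;        // remember the continuation marker
    continue;
  }
  if (e.sessionId && pendingLeaf) {
    const prev = uuidToSession.get(pendingLeaf);
    if (prev) continued.add(prev);   // the earlier session is superseded
    pendingLeaf = null;
  }
}

console.log([...continued]);         // [ 'A' ] -> only session 'B' remains visible

With the real data, 'A' is dropped from visibleSessions and 'B' inherits the marker's summary.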

View File

@@ -1 +0,0 @@
-hello world 5