Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -35,12 +35,15 @@ extension CostUsageScanner {
fileURL: URL,
range: CostUsageDayRange,
providerFilter: ClaudeLogProviderFilter,
startOffset: Int64 = 0) -> ClaudeParseResult
startOffset: Int64 = 0,
existingSeenKeys: Set<String> = []) -> ClaudeParseResult
{
var days: [String: [String: [Int]]] = [:]
// Track seen message+request IDs to deduplicate streaming chunks within a JSONL file.
// Track seen message+request IDs to deduplicate streaming chunks.
// Claude emits multiple lines per message with cumulative usage, so we only count once.
var seenKeys: Set<String> = []
// Uses existingSeenKeys from prior files to also deduplicate across files
// (e.g. subagent logs that duplicate parent session entries).
var seenKeys = existingSeenKeys

struct ClaudeTokens: Sendable {
let input: Int
Expand Down Expand Up @@ -133,7 +136,7 @@ extension CostUsageScanner {
add(dayKey: dayKey, model: model, tokens: tokens)
})) ?? startOffset

return ClaudeParseResult(days: days, parsedBytes: parsedBytes)
return ClaudeParseResult(days: days, parsedBytes: parsedBytes, seenKeys: seenKeys)
}

private static let vertexProviderKeys: Set<String> = [
Expand Down Expand Up @@ -263,6 +266,9 @@ extension CostUsageScanner {
var touched: Set<String>
let range: CostUsageDayRange
let providerFilter: ClaudeLogProviderFilter
/// Shared across all files in a scan pass to deduplicate messages
/// that appear in both a parent session JSONL and its subagent files.
var globalSeenKeys: Set<String> = []

init(cache: CostUsageCache, range: CostUsageDayRange, providerFilter: ClaudeLogProviderFilter) {
self.cache = cache
Expand Down Expand Up @@ -296,7 +302,9 @@ extension CostUsageScanner {
fileURL: url,
range: state.range,
providerFilter: state.providerFilter,
startOffset: startOffset)
startOffset: startOffset,
existingSeenKeys: state.globalSeenKeys)
state.globalSeenKeys = delta.seenKeys
if !delta.days.isEmpty {
Self.applyFileDays(cache: &state.cache, fileDays: delta.days, sign: 1)
}
Expand All @@ -317,7 +325,9 @@ extension CostUsageScanner {
let parsed = Self.parseClaudeFile(
fileURL: url,
range: state.range,
providerFilter: state.providerFilter)
providerFilter: state.providerFilter,
existingSeenKeys: state.globalSeenKeys)
state.globalSeenKeys = parsed.seenKeys
Comment on lines +329 to +330
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

P1 Badge Recompute duplicate ownership when files are removed

Passing globalSeenKeys into each parse makes later duplicate files persist with empty days, but that per-file attribution is then cached. If the file that originally “won” a duplicate key is deleted in a later refresh, stale cleanup subtracts its usage while the surviving duplicate file is skipped as unchanged, so those tokens vanish from totals until a forced rescan. This creates incorrect undercounting in normal log-rotation/deletion flows for Claude sessions with parent/subagent duplicates.

Useful? React with 👍 / 👎.

let usage = Self.makeFileUsage(
mtimeUnixMs: mtimeMs,
size: size,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@ enum CostUsageScanner {
/// Result of parsing one Claude JSONL usage-log file (or an incremental tail of one).
struct ClaudeParseResult {
// Token usage keyed by day string, then by model name, mapped to an array of
// token counts. NOTE(review): the meaning of each array index (input/output/etc.)
// is defined by the parser, which is not fully visible here — confirm there.
let days: [String: [String: [Int]]]
// Byte offset reached by this parse pass; presumably fed back as `startOffset`
// on the next incremental parse of the same file — TODO confirm with caller.
let parsedBytes: Int64
// Message+request deduplication keys seen so far. Returned so the caller can
// carry them across files, deduplicating entries that appear in both a parent
// session JSONL and its subagent logs (each message counted only once).
let seenKeys: Set<String>
}

static func loadDailyReport(
Expand Down