Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion tools/session-token-scan/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,9 @@ Useful options:
- `--limit 0`: scan all discovered rollout files under the inputs.
- `--json`: emit machine-readable output for PR evidence or later classifiers.
- `--usage-root ~/.code/usage`: correlate timestamped usage entries when
available.
available. When the usage root contains multiple account files, pass
`--account-id <id>` to avoid mixing unrelated account usage into a session.
- `--account-id <id>`: filter usage correlation to one account.
- `--large-threshold 16384`: set the byte threshold for large-record suspects.

## Reports
Expand Down
37 changes: 33 additions & 4 deletions tools/session-token-scan/scan.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
from dataclasses import dataclass, field
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Iterable
from typing import Any, Iterable, Sequence


TOKEN_KEYS = ("input_tokens", "cached_input_tokens", "output_tokens", "reasoning_output_tokens", "total_tokens")
Expand Down Expand Up @@ -90,6 +90,7 @@ class SessionReport:
large_payloads: list[LargePayload] = field(default_factory=list)
usage_entries: int = 0
usage_tokens: TokenUsage = field(default_factory=TokenUsage)
usage_note: str | None = None

@property
def final_total(self) -> TokenUsage:
Expand Down Expand Up @@ -364,9 +365,26 @@ def load_usage_entries(root: Path) -> list[tuple[datetime, TokenUsage, str]]:
return entries


def attach_usage(reports: list[SessionReport], usage_entries: list[tuple[datetime, TokenUsage, str]]) -> None:
def attach_usage(
reports: list[SessionReport],
usage_entries: Sequence[tuple[datetime, TokenUsage, str]],
account_id: str | None = None,
) -> None:
if not usage_entries:
return
if account_id is None:
accounts = {account for _timestamp, _tokens, account in usage_entries}
if len(accounts) != 1:
note = f"usage correlation skipped; pass --account-id for one of {', '.join(sorted(accounts))}"
for report in reports:
report.usage_note = note
return
account_id = next(iter(accounts))
elif all(account != account_id for _timestamp, _tokens, account in usage_entries):
note = f"usage correlation skipped; account id {account_id!r} was not found in usage entries"
for report in reports:
report.usage_note = note
return
for report in reports:
start = parse_timestamp(report.started_at)
end = parse_timestamp(report.ended_at)
Expand All @@ -376,7 +394,9 @@ def attach_usage(reports: list[SessionReport], usage_entries: list[tuple[datetim
start = start.replace(tzinfo=timezone.utc)
if end.tzinfo is None:
end = end.replace(tzinfo=timezone.utc)
for timestamp, tokens, _account in usage_entries:
for timestamp, tokens, account in usage_entries:
if account != account_id:
continue
if start <= timestamp <= end:
report.usage_entries += 1
report.usage_tokens.add(tokens)
Expand Down Expand Up @@ -411,6 +431,7 @@ def to_jsonable(report: SessionReport) -> dict[str, Any]:
"agent_event_count": report.agent_event_count,
"usage_entries": report.usage_entries,
"usage_tokens": report.usage_tokens.to_dict(),
"usage_note": report.usage_note,
"large_payloads": [payload.__dict__ for payload in report.large_payloads],
"token_events": [
{
Expand Down Expand Up @@ -493,6 +514,13 @@ def print_text_report(reports: list[SessionReport], top: int) -> None:
)
print()

usage_notes = sorted({report.usage_note for report in reports if report.usage_note})
if usage_notes:
print("## Usage Correlation Notes")
for note in usage_notes:
print(f"- {note}")
print()


def event_summary(report: SessionReport, event: TokenEvent) -> dict[str, Any]:
return {
Expand All @@ -512,6 +540,7 @@ def parse_args(argv: list[str]) -> argparse.Namespace:
parser.add_argument("--top", type=int, default=10, help="Number of top suspects to print per section.")
parser.add_argument("--large-threshold", type=int, default=16_384, help="Record/string byte threshold for large-record suspects.")
parser.add_argument("--usage-root", default="~/.code/usage", help="Usage directory for optional timestamp correlation.")
parser.add_argument("--account-id", help="Only correlate usage entries for this account id. Required when --usage-root has multiple accounts.")
parser.add_argument("--json", action="store_true", help="Emit machine-readable JSON instead of text.")
return parser.parse_args(argv)

Expand All @@ -521,7 +550,7 @@ def main(argv: list[str]) -> int:
limit = None if args.limit == 0 else args.limit
sessions = discover_sessions(args.inputs, args.pattern, limit)
reports = [analyze_session(path, args.large_threshold, args.top) for path in sessions]
attach_usage(reports, load_usage_entries(Path(args.usage_root).expanduser()))
attach_usage(reports, load_usage_entries(Path(args.usage_root).expanduser()), args.account_id)
if args.json:
print(json.dumps({"sessions": [to_jsonable(report) for report in reports]}, indent=2, sort_keys=True))
else:
Expand Down
74 changes: 74 additions & 0 deletions tools/session-token-scan/test_scan.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import json
import sys
import unittest
from datetime import datetime
from pathlib import Path
from tempfile import TemporaryDirectory

Expand All @@ -11,6 +12,12 @@
import scan


def ts(value: str) -> datetime:
    """Parse an ISO-8601 timestamp fixture string into an aware datetime.

    Thin wrapper over ``scan.parse_timestamp`` for test fixtures. Raises
    ``ValueError`` on unparseable input instead of relying on ``assert``,
    so the guard still fires when tests run under ``python -O`` (asserts
    are stripped by the optimizer).
    """
    timestamp = scan.parse_timestamp(value)
    if timestamp is None:
        # Fail loudly: a bad fixture timestamp should abort the test setup,
        # not silently propagate None into attach_usage.
        raise ValueError(f"unparseable test fixture timestamp: {value!r}")
    return timestamp


class SessionTokenScanTests(unittest.TestCase):
def test_analyze_session_reports_expected_suspects(self) -> None:
with TemporaryDirectory() as directory:
Expand Down Expand Up @@ -58,6 +65,73 @@ def test_analyze_session_reports_expected_suspects(self) -> None:
self.assertEqual(report.token_total_resets, 1)
self.assertTrue(report.large_payloads)

def test_attach_usage_does_not_mix_multiple_accounts_without_filter(self) -> None:
    """With two accounts present and no --account-id, correlation is skipped with a note."""
    session = scan.SessionReport(
        path="rollout-test.jsonl",
        bytes=0,
        started_at="2026-05-12T00:00:00Z",
        ended_at="2026-05-12T00:10:00Z",
    )
    mixed_entries = [
        (ts("2026-05-12T00:01:00Z"), scan.TokenUsage(total_tokens=100), "acct-a"),
        (ts("2026-05-12T00:02:00Z"), scan.TokenUsage(total_tokens=200), "acct-b"),
    ]

    scan.attach_usage([session], mixed_entries)

    # Nothing should have been correlated, and the note should tell the
    # operator how to disambiguate.
    self.assertEqual(0, session.usage_entries)
    self.assertEqual(0, session.usage_tokens.total_tokens)
    self.assertIn("pass --account-id", session.usage_note or "")

def test_attach_usage_filters_by_requested_account(self) -> None:
    """An explicit --account-id correlates only that account's entries."""
    session = scan.SessionReport(
        path="rollout-test.jsonl",
        bytes=0,
        started_at="2026-05-12T00:00:00Z",
        ended_at="2026-05-12T00:10:00Z",
    )
    mixed_entries = [
        (ts("2026-05-12T00:01:00Z"), scan.TokenUsage(total_tokens=100), "acct-a"),
        (ts("2026-05-12T00:02:00Z"), scan.TokenUsage(total_tokens=200), "acct-b"),
    ]

    scan.attach_usage([session], mixed_entries, account_id="acct-b")

    # Only the single acct-b entry (200 tokens) should be attached.
    self.assertEqual(1, session.usage_entries)
    self.assertEqual(200, session.usage_tokens.total_tokens)

def test_attach_usage_reports_unknown_requested_account(self) -> None:
    """Requesting an account id absent from the usage data skips correlation with a note."""
    session = scan.SessionReport(
        path="rollout-test.jsonl",
        bytes=0,
        started_at="2026-05-12T00:00:00Z",
        ended_at="2026-05-12T00:10:00Z",
    )
    known_account_entries = [
        (ts("2026-05-12T00:01:00Z"), scan.TokenUsage(total_tokens=100), "acct-a"),
    ]

    scan.attach_usage([session], known_account_entries, account_id="acct-missing")

    # No entries may attach, and the note should say the id was not found.
    self.assertEqual(0, session.usage_entries)
    self.assertIn("was not found", session.usage_note or "")

def test_attach_usage_correlates_single_account_without_filter(self) -> None:
    """When only one account appears in usage data, no --account-id is needed."""
    session = scan.SessionReport(
        path="rollout-test.jsonl",
        bytes=0,
        started_at="2026-05-12T00:00:00Z",
        ended_at="2026-05-12T00:10:00Z",
    )
    single_account_entries = [
        (ts("2026-05-12T00:01:00Z"), scan.TokenUsage(total_tokens=100), "acct-a"),
    ]

    scan.attach_usage([session], single_account_entries)

    # The lone acct-a entry falls inside the session window and attaches.
    self.assertEqual(1, session.usage_entries)
    self.assertEqual(100, session.usage_tokens.total_tokens)


def token_count(timestamp: str, *, total: int, last: int) -> dict[str, object]:
return {
Expand Down