-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathtoken_tracker.py
More file actions
134 lines (114 loc) · 4.49 KB
/
token_tracker.py
File metadata and controls
134 lines (114 loc) · 4.49 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
"""
token_tracker.py — Token usage tracking for github-fixer.
Writes per-issue JSONL records to ~/.github-fixer/token_usage.jsonl using
usage data returned directly by the Anthropic API (response.usage).
"""
from __future__ import annotations
import json
import logging
import threading
from dataclasses import asdict, dataclass
from datetime import datetime, timezone
from pathlib import Path
logger = logging.getLogger(__name__)
TRACKER_DIR = Path.home() / ".github-fixer"
USAGE_FILE = TRACKER_DIR / "token_usage.jsonl"
_write_lock = threading.Lock()
SEP = "─" * 80
# --------------------------------------------------------------------------- #
# Per-issue JSONL record #
# --------------------------------------------------------------------------- #
@dataclass
class UsageRecord:
    """One per-issue token-usage entry, serialized as a single JSONL line."""

    # ISO-8601 UTC timestamp of when the record was written.
    timestamp: str
    # Identifier of the issue this API call served — presumably "owner/repo#N"; confirm against callers.
    issue_ref: str
    # Token counts as reported by the Anthropic API (response.usage).
    input_tokens: int
    output_tokens: int
    # Prompt-caching counters (0 when the API omits them).
    cache_creation_tokens: int
    cache_read_tokens: int
    # Estimated cost of the call in US dollars (0.0 when unknown).
    cost_usd: float
def record_usage(
    issue_ref: str,
    usage: dict | None,
    cost_usd: float | None,
) -> None:
    """Append one per-issue usage record to USAGE_FILE. Never raises.

    Args:
        issue_ref: Identifier for the issue this API call served.
        usage: Fields from ``anthropic.types.Usage``:
            input_tokens, output_tokens,
            cache_creation_input_tokens (optional),
            cache_read_input_tokens (optional).
        cost_usd: Estimated cost of the call in USD, if known.
    """
    try:
        # Normalize once instead of re-evaluating ``usage or {}`` per field,
        # and coerce with ``or 0`` so an explicit None value in the dict
        # (possible for optional cache fields) cannot abort the record.
        u = usage or {}
        record = UsageRecord(
            timestamp=datetime.now(timezone.utc).isoformat(),
            issue_ref=issue_ref,
            input_tokens=int(u.get("input_tokens") or 0),
            output_tokens=int(u.get("output_tokens") or 0),
            cache_creation_tokens=int(u.get("cache_creation_input_tokens") or 0),
            cache_read_tokens=int(u.get("cache_read_input_tokens") or 0),
            cost_usd=float(cost_usd or 0.0),
        )
        TRACKER_DIR.mkdir(parents=True, exist_ok=True)
        line = json.dumps(asdict(record)) + "\n"
        # Serialize appends so concurrent writers never interleave lines.
        with _write_lock:
            with USAGE_FILE.open("a", encoding="utf-8") as f:
                f.write(line)
    except Exception:
        # Tracking is best-effort; never let it break the main workflow.
        logger.warning("Failed to record token usage", exc_info=True)
# --------------------------------------------------------------------------- #
# Usage summary printer #
# --------------------------------------------------------------------------- #
def print_usage_summary(
    issue_ref: str = "",
    last_usage: dict | None = None,
    last_cost: float | None = None,
) -> None:
    """Print a brief token usage summary to stdout. Never raises.

    Args:
        issue_ref: Optional issue identifier shown in the header.
        last_usage: Usage dict from the most recent API response, if any.
        last_cost: Estimated cost (USD) of the most recent run, if known.
    """
    try:
        # Delegate the real work; this wrapper only provides the
        # never-raises guarantee for callers in the CLI path.
        _print_summary(issue_ref, last_usage, last_cost)
    except Exception:
        logger.warning("Failed to print token usage summary", exc_info=True)
def _print_summary(
    issue_ref: str,
    last_usage: dict | None,
    last_cost: float | None,
) -> None:
    """Format and print the header plus cumulative totals from the JSONL log.

    May raise; print_usage_summary() is the never-raises wrapper around it.
    """
    header = " Anthropic API Usage Summary"
    if issue_ref:
        # Per-run details are appended inside the "(issue_ref ...)" group,
        # keeping the parentheses balanced when issue_ref is empty.
        header += f" ({issue_ref}"
        if last_usage:
            inp = last_usage.get("input_tokens", 0)
            out = last_usage.get("output_tokens", 0)
            cache_read = last_usage.get("cache_read_input_tokens", 0)
            cache_create = last_usage.get("cache_creation_input_tokens", 0)
            header += f" — this run: {inp:,} in / {out:,} out"
            if cache_read or cache_create:
                header += f" / {cache_read:,} cache-read / {cache_create:,} cache-create"
        if last_cost:
            header += f" / ${last_cost:.4f}"
        header += ")"
    print(SEP)
    print(header)
    print(SEP)

    # Show cumulative history from the JSONL log.
    if not USAGE_FILE.exists():
        print(" (no usage history — token_usage.jsonl not found)")
        print(SEP)
        return

    # Parse line-by-line: one corrupt line must not wipe out the whole
    # history view (the old all-or-nothing try/except discarded everything).
    records: list[dict] = []
    for raw_line in USAGE_FILE.read_text(encoding="utf-8").splitlines():
        if not raw_line.strip():
            continue
        try:
            records.append(json.loads(raw_line))
        except Exception:
            continue

    if records:
        # ``or 0`` guards against records carrying explicit JSON nulls,
        # which would otherwise make sum() raise and hide the totals.
        total_in = sum(r.get("input_tokens") or 0 for r in records)
        total_out = sum(r.get("output_tokens") or 0 for r in records)
        total_cost = sum(r.get("cost_usd") or 0.0 for r in records)
        print(f" Cumulative ({len(records)} runs logged):")
        print(f" {total_in:>14,} input tokens")
        print(f" {total_out:>14,} output tokens")
        if total_cost:
            print(f" ${total_cost:.4f} estimated cost")
    print(SEP)