From 995a758d9ca1e40376abc66ca68c653de69e94bb Mon Sep 17 00:00:00 2001
From: chengyongru
Date: Mon, 30 Mar 2026 16:56:36 +0800
Subject: [PATCH] feat(anthropic): map cache_read_input_tokens to cached_tokens

---
 nanobot/providers/anthropic_provider.py |  4 +++
 tests/providers/test_cached_tokens.py   | 40 +++++++++++++++++++++++++
 2 files changed, 44 insertions(+)

diff --git a/nanobot/providers/anthropic_provider.py b/nanobot/providers/anthropic_provider.py
index 3c789e730..fabcd5656 100644
--- a/nanobot/providers/anthropic_provider.py
+++ b/nanobot/providers/anthropic_provider.py
@@ -379,6 +379,10 @@ class AnthropicProvider(LLMProvider):
             val = getattr(response.usage, attr, 0)
             if val:
                 usage[attr] = val
+        # Normalize to cached_tokens for downstream consistency.
+        cache_read = usage.get("cache_read_input_tokens", 0)
+        if cache_read:
+            usage["cached_tokens"] = cache_read
 
         return LLMResponse(
             content="".join(content_parts) or None,
diff --git a/tests/providers/test_cached_tokens.py b/tests/providers/test_cached_tokens.py
index 8bc2555d1..fce22cf65 100644
--- a/tests/providers/test_cached_tokens.py
+++ b/tests/providers/test_cached_tokens.py
@@ -189,3 +189,43 @@ def test_extract_usage_priority_nested_over_top_level_dict():
     }
     result = p._parse(response)
     assert result.usage["cached_tokens"] == 100
+
+
+def test_anthropic_maps_cache_fields_to_cached_tokens():
+    """Anthropic's cache_read_input_tokens should map to cached_tokens."""
+    from nanobot.providers.anthropic_provider import AnthropicProvider
+
+    usage_obj = FakeUsage(
+        input_tokens=800,
+        output_tokens=200,
+        cache_creation_input_tokens=0,
+        cache_read_input_tokens=1200,
+    )
+    content_block = FakeUsage(type="text", text="hello")
+    response = FakeUsage(
+        id="msg_1",
+        type="message",
+        stop_reason="end_turn",
+        content=[content_block],
+        usage=usage_obj,
+    )
+    result = AnthropicProvider._parse_response(response)
+    assert result.usage["cached_tokens"] == 1200
+    assert result.usage["prompt_tokens"] == 800
+
+
+def test_anthropic_no_cache_fields():
+    """Anthropic response without cache fields should not have cached_tokens."""
+    from nanobot.providers.anthropic_provider import AnthropicProvider
+
+    usage_obj = FakeUsage(input_tokens=800, output_tokens=200)
+    content_block = FakeUsage(type="text", text="hello")
+    response = FakeUsage(
+        id="msg_1",
+        type="message",
+        stop_reason="end_turn",
+        content=[content_block],
+        usage=usage_obj,
+    )
+    result = AnthropicProvider._parse_response(response)
+    assert "cached_tokens" not in result.usage