feat(anthropic): map cache_read_input_tokens to cached_tokens

This commit is contained in:
chengyongru 2026-03-30 16:56:36 +08:00
parent 720cf222a3
commit 995a758d9c
2 changed files with 44 additions and 0 deletions

View File

@ -379,6 +379,10 @@ class AnthropicProvider(LLMProvider):
val = getattr(response.usage, attr, 0)
if val:
usage[attr] = val
# Normalize to cached_tokens for downstream consistency.
cache_read = usage.get("cache_read_input_tokens", 0)
if cache_read:
usage["cached_tokens"] = cache_read
return LLMResponse(
content="".join(content_parts) or None,

View File

@ -189,3 +189,43 @@ def test_extract_usage_priority_nested_over_top_level_dict():
}
result = p._parse(response)
assert result.usage["cached_tokens"] == 100
def test_anthropic_maps_cache_fields_to_cached_tokens():
    """Anthropic's cache_read_input_tokens should map to cached_tokens."""
    from nanobot.providers.anthropic_provider import AnthropicProvider

    # Fake Anthropic usage payload: 1200 tokens were served from prompt cache.
    fake_usage = FakeUsage(
        input_tokens=800,
        output_tokens=200,
        cache_creation_input_tokens=0,
        cache_read_input_tokens=1200,
    )
    text_block = FakeUsage(type="text", text="hello")
    fake_response = FakeUsage(
        id="msg_1",
        type="message",
        stop_reason="end_turn",
        content=[text_block],
        usage=fake_usage,
    )

    parsed = AnthropicProvider._parse_response(fake_response)

    # cache_read_input_tokens is normalized to the provider-agnostic key.
    assert parsed.usage["cached_tokens"] == 1200
    assert parsed.usage["prompt_tokens"] == 800
def test_anthropic_no_cache_fields():
    """Anthropic response without cache fields should not have cached_tokens."""
    from nanobot.providers.anthropic_provider import AnthropicProvider

    # Usage payload with no cache_* attributes at all.
    plain_usage = FakeUsage(input_tokens=800, output_tokens=200)
    text_block = FakeUsage(type="text", text="hello")
    plain_response = FakeUsage(
        id="msg_1",
        type="message",
        stop_reason="end_turn",
        content=[text_block],
        usage=plain_usage,
    )

    parsed = AnthropicProvider._parse_response(plain_response)

    # No cache reads -> the normalized key must be absent, not zero.
    assert "cached_tokens" not in parsed.usage