feat(providers): add AnthropicOAuthProvider with Bearer auth

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
wylab
2026-02-13 12:58:36 +01:00
parent 7b710116a4
commit 534a8344bd
2 changed files with 295 additions and 0 deletions

View File

@@ -0,0 +1,207 @@
"""Anthropic OAuth provider - direct API calls with Bearer auth.
This provider bypasses litellm to properly handle OAuth tokens
which require Authorization: Bearer header instead of x-api-key.
"""
import json
from typing import Any
import httpx
from nanobot.providers.base import LLMProvider, LLMResponse, ToolCallRequest
from nanobot.providers.oauth_utils import get_auth_headers, get_claude_code_system_prefix
class AnthropicOAuthProvider(LLMProvider):
    """
    Anthropic provider using OAuth token authentication.

    Unlike the LiteLLM provider, this calls the Anthropic API directly
    with proper Bearer token authentication for Claude Max/Pro subscriptions.
    """

    # Default Messages API endpoint; overridable per instance via ``api_base``.
    ANTHROPIC_API_URL = "https://api.anthropic.com/v1/messages"

    def __init__(
        self,
        oauth_token: str,
        default_model: str = "claude-opus-4-5",
        api_base: str | None = None,
    ):
        """Initialize the provider.

        Args:
            oauth_token: OAuth access token, sent as a Bearer credential
                (instead of the usual ``x-api-key`` header).
            default_model: Model used when ``chat`` is called without one.
            api_base: Optional base URL override (e.g. for a proxy).
        """
        super().__init__(api_key=None, api_base=api_base)
        self.oauth_token = oauth_token
        self.default_model = default_model
        # Created lazily on first request; released by close().
        self._client: httpx.AsyncClient | None = None

    def _get_headers(self) -> dict[str, str]:
        """Get request headers with Bearer auth."""
        return get_auth_headers(self.oauth_token, is_oauth=True)

    def _get_api_url(self) -> str:
        """Get the API endpoint URL, honoring a custom ``api_base`` if set."""
        if self.api_base:
            return f"{self.api_base.rstrip('/')}/v1/messages"
        return self.ANTHROPIC_API_URL

    async def _get_client(self) -> httpx.AsyncClient:
        """Get or create the shared async HTTP client."""
        if self._client is None:
            # Generous timeout: long completions can take several minutes.
            self._client = httpx.AsyncClient(timeout=300.0)
        return self._client

    def _prepare_messages(
        self,
        messages: list[dict[str, Any]]
    ) -> tuple[str | None, list[dict[str, Any]]]:
        """Prepare messages, extracting system prompt and adding Claude Code identity.

        Anthropic's API takes the system prompt as a top-level field rather
        than a message, so system-role messages are pulled out and merged
        behind the Claude Code identity prefix.

        Returns:
            Tuple of (system_prompt, messages_without_system).
        """
        system_parts = [get_claude_code_system_prefix()]
        filtered_messages = []
        for msg in messages:
            if msg.get("role") == "system":
                content = msg.get("content", "")
                # Skip empty system entries so the joined prompt has no
                # stray blank "\n\n" gaps.
                if content:
                    system_parts.append(content)
            else:
                filtered_messages.append(msg)
        system_prompt = "\n\n".join(system_parts)
        return system_prompt, filtered_messages

    def _convert_tools_to_anthropic(
        self,
        tools: list[dict[str, Any]] | None
    ) -> list[dict[str, Any]] | None:
        """Convert OpenAI-format tool specs to Anthropic's tool schema.

        Entries whose ``type`` is not ``"function"`` are ignored.

        Returns:
            Anthropic-format tool list, or None if nothing converts.
        """
        if not tools:
            return None
        anthropic_tools = []
        for tool in tools:
            if tool.get("type") == "function":
                func = tool["function"]
                anthropic_tools.append({
                    "name": func["name"],
                    "description": func.get("description", ""),
                    # Anthropic calls the JSON schema "input_schema".
                    "input_schema": func.get("parameters", {"type": "object", "properties": {}})
                })
        return anthropic_tools if anthropic_tools else None

    async def _make_request(
        self,
        messages: list[dict[str, Any]],
        system: str | None = None,
        model: str = "claude-opus-4-5",
        max_tokens: int = 4096,
        temperature: float = 0.7,
        tools: list[dict[str, Any]] | None = None,
    ) -> dict[str, Any]:
        """POST a Messages API request and return the decoded JSON body.

        Raises:
            RuntimeError: if the API responds with a non-200 status.
        """
        client = await self._get_client()
        payload: dict[str, Any] = {
            "model": model,
            "messages": messages,
            "max_tokens": max_tokens,
            "temperature": temperature,
        }
        # Optional fields are omitted entirely when unset.
        if system:
            payload["system"] = system
        if tools:
            payload["tools"] = tools
        response = await client.post(
            self._get_api_url(),
            headers=self._get_headers(),
            json=payload,
        )
        if response.status_code != 200:
            # RuntimeError (a subclass of Exception) keeps the error
            # catchable by chat()'s broad handler below.
            error_text = response.text
            raise RuntimeError(f"Anthropic API error {response.status_code}: {error_text}")
        return response.json()

    async def chat(
        self,
        messages: list[dict[str, Any]],
        tools: list[dict[str, Any]] | None = None,
        model: str | None = None,
        max_tokens: int = 4096,
        temperature: float = 0.7,
    ) -> LLMResponse:
        """Send chat completion request to Anthropic API.

        Errors are not raised to the caller; they are reported as an
        ``LLMResponse`` with ``finish_reason="error"``.
        """
        model = model or self.default_model
        # Strip provider prefix if present (e.g. "anthropic/claude-opus-4-5" -> "claude-opus-4-5")
        if "/" in model:
            model = model.split("/")[-1]
        system, prepared_messages = self._prepare_messages(messages)
        anthropic_tools = self._convert_tools_to_anthropic(tools)
        try:
            response = await self._make_request(
                messages=prepared_messages,
                system=system,
                model=model,
                max_tokens=max_tokens,
                temperature=temperature,
                tools=anthropic_tools,
            )
            return self._parse_response(response)
        except Exception as e:
            return LLMResponse(
                content=f"Error calling LLM: {str(e)}",
                finish_reason="error",
            )

    def _parse_response(self, response: dict[str, Any]) -> LLMResponse:
        """Parse an Anthropic Messages API response into an ``LLMResponse``.

        Concatenates all text blocks and collects tool_use blocks as
        ``ToolCallRequest`` objects; token usage is normalized to the
        OpenAI-style prompt/completion/total naming.
        """
        content_blocks = response.get("content", [])
        text_content = ""
        tool_calls = []
        for block in content_blocks:
            if block.get("type") == "text":
                text_content += block.get("text", "")
            elif block.get("type") == "tool_use":
                tool_calls.append(ToolCallRequest(
                    id=block.get("id", ""),
                    name=block.get("name", ""),
                    arguments=block.get("input", {}),
                ))
        usage = {}
        if "usage" in response:
            usage = {
                "prompt_tokens": response["usage"].get("input_tokens", 0),
                "completion_tokens": response["usage"].get("output_tokens", 0),
                "total_tokens": (
                    response["usage"].get("input_tokens", 0) +
                    response["usage"].get("output_tokens", 0)
                ),
            }
        return LLMResponse(
            # Empty text maps to None so callers can truth-test content.
            content=text_content or None,
            tool_calls=tool_calls,
            finish_reason=response.get("stop_reason", "end_turn"),
            usage=usage,
        )

    def get_default_model(self) -> str:
        """Get the default model."""
        return self.default_model

    async def close(self):
        """Close the HTTP client (safe to call when no client was created)."""
        if self._client:
            await self._client.aclose()
            self._client = None

View File

@@ -0,0 +1,88 @@
"""Test Anthropic OAuth provider."""
import pytest
from unittest.mock import AsyncMock, patch, MagicMock
from nanobot.providers.anthropic_oauth import AnthropicOAuthProvider
from nanobot.providers.base import LLMResponse
@pytest.fixture
def provider():
    """Create provider with test OAuth token."""
    token = "sk-ant-oat01-test-token"
    return AnthropicOAuthProvider(oauth_token=token, default_model="claude-opus-4-5")
def test_provider_init(provider):
    """Provider should initialize with OAuth token."""
    assert provider.default_model == "claude-opus-4-5"
    assert provider.oauth_token == "sk-ant-oat01-test-token"
def test_provider_uses_bearer_auth(provider):
    """Provider should use Bearer auth, not x-api-key."""
    headers = provider._get_headers()
    auth_value = headers.get("Authorization")
    assert auth_value is not None
    assert auth_value.startswith("Bearer ")
    assert "x-api-key" not in headers
@pytest.mark.asyncio
async def test_chat_prepends_system_prompt(provider):
    """Chat should prepend Claude Code identity to system prompt."""
    with patch.object(provider, "_make_request", new_callable=AsyncMock) as mock_req:
        mock_req.return_value = {
            "content": [{"type": "text", "text": "Hi"}],
            "stop_reason": "end_turn",
        }
        await provider.chat([{"role": "user", "content": "Hello"}])
        # _make_request is invoked with keyword args; inspect the system prompt.
        assert "Claude Code" in mock_req.call_args.kwargs["system"]
def test_parse_response_text(provider):
    """Should parse text response correctly."""
    result = provider._parse_response({
        "content": [{"type": "text", "text": "Hello world"}],
        "stop_reason": "end_turn",
        "usage": {"input_tokens": 10, "output_tokens": 5},
    })
    assert result.content == "Hello world"
    assert result.finish_reason == "end_turn"
    assert result.usage["prompt_tokens"] == 10
def test_parse_response_tool_calls(provider):
    """Should parse tool call response correctly."""
    tool_block = {
        "type": "tool_use",
        "id": "call_1",
        "name": "read_file",
        "input": {"path": "/tmp/test"},
    }
    result = provider._parse_response({
        "content": [tool_block],
        "stop_reason": "tool_use",
        "usage": {"input_tokens": 10, "output_tokens": 5},
    })
    assert len(result.tool_calls) == 1
    call = result.tool_calls[0]
    assert call.name == "read_file"
    assert call.arguments == {"path": "/tmp/test"}
def test_convert_tools_to_anthropic(provider):
    """Should convert OpenAI-format tools to Anthropic format."""
    function_spec = {
        "name": "read_file",
        "description": "Read a file",
        "parameters": {"type": "object", "properties": {"path": {"type": "string"}}},
    }
    converted = provider._convert_tools_to_anthropic(
        [{"type": "function", "function": function_spec}]
    )
    assert len(converted) == 1
    assert converted[0]["name"] == "read_file"
    assert "input_schema" in converted[0]