Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
22 changes: 22 additions & 0 deletions litellm/llms/anthropic/common_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -536,3 +536,25 @@ def process_anthropic_headers(headers: Union[httpx.Headers, dict]) -> dict:

additional_headers = {**llm_response_headers, **openai_headers}
return additional_headers


def get_anthropic_beta_from_headers(headers: dict) -> List[str]:
    """
    Extract anthropic-beta header values and convert them to a list.
    Supports comma-separated values from user headers.

    Used by both converse and invoke transformations for consistent handling
    of anthropic-beta headers that should be passed to AWS Bedrock.

    Args:
        headers (dict): Request headers dictionary

    Returns:
        List[str]: List of anthropic beta feature strings, empty list if no header
    """
    anthropic_beta_header = headers.get("anthropic-beta")
    if not anthropic_beta_header:
        return []

    # Split comma-separated values and strip whitespace. Empty segments
    # (from a trailing comma or "a,,b") are dropped so that no blank beta
    # flag is ever forwarded to Bedrock.
    return [
        beta.strip()
        for beta in anthropic_beta_header.split(",")
        if beta.strip()
    ]
3 changes: 2 additions & 1 deletion litellm/llms/bedrock/chat/converse_transformation.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,10 +55,11 @@
)
from litellm.utils import add_dummy_tool, has_tool_call_blocks, supports_reasoning

from litellm.llms.anthropic.common_utils import get_anthropic_beta_from_headers

from ..common_utils import (
BedrockError,
BedrockModelInfo,
get_anthropic_beta_from_headers,
get_bedrock_tool_name,
)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,10 @@
import httpx

from litellm.llms.anthropic.chat.transformation import AnthropicConfig
from litellm.llms.anthropic.common_utils import get_anthropic_beta_from_headers
from litellm.llms.bedrock.chat.invoke_transformations.base_invoke_transformation import (
AmazonInvokeConfig,
)
from litellm.llms.bedrock.common_utils import get_anthropic_beta_from_headers
from litellm.types.llms.anthropic import ANTHROPIC_TOOL_SEARCH_BETA_HEADER
from litellm.types.llms.openai import AllMessageValues
from litellm.types.utils import ModelResponse
Expand Down
22 changes: 0 additions & 22 deletions litellm/llms/bedrock/common_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -708,28 +708,6 @@ def _parse_message_from_event(self, event) -> Optional[str]:
return chunk.decode() # type: ignore[no-any-return]


def get_anthropic_beta_from_headers(headers: dict) -> List[str]:
    """
    Return the anthropic-beta header's value as a list of feature strings.

    The header may carry several comma-separated beta flags supplied by the
    user; each flag is stripped of surrounding whitespace. Shared by the
    converse and invoke transformations so that anthropic-beta values are
    forwarded to AWS Bedrock consistently.

    Args:
        headers (dict): Request headers dictionary

    Returns:
        List[str]: Parsed beta feature strings; an empty list when the
            header is absent or empty.
    """
    raw_value = headers.get("anthropic-beta")
    if not raw_value:
        return []

    # Whitespace around each comma-separated entry is not significant.
    parts = raw_value.split(",")
    return [part.strip() for part in parts]


class CommonBatchFilesUtils:
"""
Common utilities for Bedrock batch and file operations.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,10 @@

import httpx

from litellm.llms.anthropic.common_utils import AnthropicModelInfo
from litellm.llms.anthropic.common_utils import (
AnthropicModelInfo,
get_anthropic_beta_from_headers,
)
from litellm.llms.anthropic.experimental_pass_through.messages.transformation import (
AnthropicMessagesConfig,
)
Expand All @@ -23,7 +26,6 @@
from litellm.llms.bedrock.chat.invoke_transformations.base_invoke_transformation import (
AmazonInvokeConfig,
)
from litellm.llms.bedrock.common_utils import get_anthropic_beta_from_headers
from litellm.types.llms.anthropic import ANTHROPIC_TOOL_SEARCH_BETA_HEADER
from litellm.types.llms.openai import AllMessageValues
from litellm.types.router import GenericLiteLLMParams
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import httpx

import litellm
from litellm.llms.anthropic.common_utils import get_anthropic_beta_from_headers
from litellm.llms.base_llm.chat.transformation import LiteLLMLoggingObj
from litellm.types.llms.openai import AllMessageValues
from litellm.types.utils import ModelResponse
Expand Down Expand Up @@ -71,16 +72,21 @@ def transform_request(

tools = optional_params.get("tools")
tool_search_used = self.is_tool_search_used(tools)

# Start with user's anthropic-beta header
beta_set = set(get_anthropic_beta_from_headers(headers))

# Add auto-detected betas, with prompt_caching disabled for Vertex
auto_betas = self.get_anthropic_beta_list(
model=model,
optional_params=optional_params,
computer_tool_used=self.is_computer_tool_used(tools),
prompt_caching_set=self.is_cache_control_set(messages),
prompt_caching_set=False, # Disable prompt caching for Vertex
file_id_used=self.is_file_id_used(messages),
mcp_server_used=self.is_mcp_server_used(optional_params.get("mcp_servers")),
)

beta_set = set(auto_betas)
beta_set.update(auto_betas)

if tool_search_used:
beta_set.add("tool-search-tool-2025-10-19") # Vertex requires this header for tool search

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,11 +10,11 @@

import pytest

from litellm.llms.anthropic.common_utils import get_anthropic_beta_from_headers
from litellm.llms.bedrock.chat.converse_transformation import AmazonConverseConfig
from litellm.llms.bedrock.chat.invoke_transformations.anthropic_claude3_transformation import (
AmazonAnthropicClaudeConfig,
)
from litellm.llms.bedrock.common_utils import get_anthropic_beta_from_headers
from litellm.llms.bedrock.messages.invoke_transformations.anthropic_claude3_transformation import (
AmazonAnthropicClaudeMessagesConfig,
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
from litellm.llms.vertex_ai.vertex_ai_partner_models.anthropic.transformation import (
VertexAIAnthropicConfig,
)
from litellm.types.llms.openai import AllMessageValues


@pytest.mark.parametrize(
Expand Down Expand Up @@ -115,3 +116,114 @@ def test_vertex_ai_anthropic_structured_output_header_not_added():
"Non-Vertex request SHOULD have anthropic-beta header for structured output"
assert result_non_vertex["anthropic-beta"] == "structured-outputs-2025-11-13", \
f"Expected 'structured-outputs-2025-11-13', got: {result_non_vertex.get('anthropic-beta')}"


def test_vertex_ai_anthropic_user_beta_headers_preserved():
    """Test that user-provided anthropic-beta headers are preserved and combined with auto-detected ones"""
    config = VertexAIAnthropicConfig()

    # User supplies two custom beta flags via the anthropic-beta header;
    # a computer-use tool should additionally trigger an auto-detected beta.
    request_headers = {"anthropic-beta": "custom-feature-1,custom-feature-2"}
    request_params = {"tools": [{"type": "computer_20250124", "name": "computer"}]}

    result = config.transform_request(
        model="claude-3-5-sonnet",
        messages=[{"role": "user", "content": "Hello"}],
        optional_params=request_params,
        litellm_params={},
        headers=request_headers,
    )

    # The transformed request must expose the combined beta list.
    assert "anthropic_beta" in result, "anthropic_beta should be in the result"
    betas = result["anthropic_beta"]

    # Both user-provided flags survive the transformation ...
    assert "custom-feature-1" in betas, \
        f"User header 'custom-feature-1' should be preserved, got: {betas}"
    assert "custom-feature-2" in betas, \
        f"User header 'custom-feature-2' should be preserved, got: {betas}"

    # ... alongside the auto-detected computer-use beta.
    assert "computer-use-2025-01-24" in betas, \
        f"Auto-detected 'computer-use-2025-01-24' should be added, got: {betas}"


def test_vertex_ai_anthropic_prompt_caching_disabled():
    """Test that prompt caching beta header is NOT added for Vertex AI even when cache_control is set"""
    config = VertexAIAnthropicConfig()

    # A content block that explicitly requests ephemeral caching — on other
    # providers this would normally trigger the prompt-caching beta flag.
    cached_text_block = {
        "type": "text",
        "text": "Hello",
        "cache_control": {"type": "ephemeral"},
    }
    messages: list[AllMessageValues] = [
        {"role": "user", "content": [cached_text_block]}
    ]

    result = config.transform_request(
        model="claude-3-5-sonnet",
        messages=messages,
        optional_params={},
        litellm_params={},
        headers={},
    )

    # Vertex AI must never receive the prompt-caching beta flag, even when
    # cache_control is present on the message content.
    if "anthropic_beta" in result:
        assert "prompt-caching-2024-07-31" not in result["anthropic_beta"], \
            f"Prompt caching beta header should NOT be added for Vertex AI, got: {result['anthropic_beta']}"


def test_vertex_ai_anthropic_tool_search_beta_header():
    """Test that tool search adds the correct beta header for Vertex AI"""
    config = VertexAIAnthropicConfig()

    # A tool-search tool in optional_params should force the tool-search
    # beta flag into the transformed request.
    tool_search_tool = {
        "type": "tool_search_tool_regex_20251119",
        "name": "tool_search",
    }

    result = config.transform_request(
        model="claude-3-5-sonnet",
        messages=[{"role": "user", "content": "Search for something"}],
        optional_params={"tools": [tool_search_tool]},
        litellm_params={},
        headers={},
    )

    # The transformed request must carry the tool-search beta flag.
    assert "anthropic_beta" in result, "anthropic_beta should be in the result"
    assert "tool-search-tool-2025-10-19" in result["anthropic_beta"], \
        f"Tool search beta header should be added for Vertex AI, got: {result['anthropic_beta']}"
Loading