2 changes: 0 additions & 2 deletions litellm/interactions/http_handler.py

@@ -4,7 +4,6 @@
 This module handles the HTTP communication for the Google Interactions API.
 """

-import json
 from typing import (
     Any,
     AsyncIterator,
@@ -18,7 +17,6 @@
 import httpx

 import litellm
-from litellm._logging import verbose_logger
 from litellm.constants import request_timeout
 from litellm.interactions.streaming_iterator import (
     InteractionsAPIStreamingIterator,
4 changes: 1 addition & 3 deletions litellm/interactions/streaming_iterator.py

@@ -8,11 +8,10 @@
 import asyncio
 import json
 from datetime import datetime
-from typing import Any, Dict, Iterator, Optional
+from typing import Any, Dict, Optional

 import httpx

-import litellm
 from litellm._logging import verbose_logger
 from litellm.constants import STREAM_SSE_DONE_STRING
 from litellm.litellm_core_utils.asyncify import run_async_function
@@ -22,7 +21,6 @@
 from litellm.litellm_core_utils.thread_pool_executor import executor
 from litellm.llms.base_llm.interactions.transformation import BaseInteractionsAPIConfig
 from litellm.types.interactions import (
-    InteractionsAPIResponse,
     InteractionsAPIStreamingResponse,
 )
 from litellm.utils import CustomStreamWrapper
@@ -50,15 +50,30 @@ def validate_anthropic_messages_environment
         )

         headers["content-type"] = "application/json"

+        # Get exclusion list from litellm_params
+        exclude_anthropic_beta_values = litellm_params.get("exclude_anthropic_beta_values") or []
+
+        # Filter existing anthropic-beta header if present and exclusions are configured
+        if "anthropic-beta" in headers and exclude_anthropic_beta_values:
+            existing_betas = [b.strip() for b in headers["anthropic-beta"].split(",")]
+            filtered_betas = [b for b in existing_betas if b not in exclude_anthropic_beta_values]
+            if filtered_betas:
+                headers["anthropic-beta"] = ",".join(filtered_betas)
+            else:
+                del headers["anthropic-beta"]
+
         # Add web search beta header for Vertex AI only if not already set
         if "anthropic-beta" not in headers:
             tools = optional_params.get("tools", [])
             for tool in tools:
                 if isinstance(tool, dict) and tool.get("type", "").startswith(ANTHROPIC_HOSTED_TOOLS.WEB_SEARCH.value):
-                    headers["anthropic-beta"] = ANTHROPIC_BETA_HEADER_VALUES.WEB_SEARCH_2025_03_05.value
+                    web_search_beta = ANTHROPIC_BETA_HEADER_VALUES.WEB_SEARCH_2025_03_05.value
+                    # Only add if not in exclusion list
+                    if web_search_beta not in exclude_anthropic_beta_values:
+                        headers["anthropic-beta"] = web_search_beta
                     break

         return headers, api_base

     def get_complete_url(
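The header-filtering behavior added above can be summarized with a small self-contained sketch; the helper name filter_anthropic_beta_header and the sample beta values are illustrative only, not litellm code:

from typing import Dict, List


def filter_anthropic_beta_header(headers: Dict[str, str], exclude: List[str]) -> Dict[str, str]:
    """Drop excluded values from an existing anthropic-beta header; remove the header if nothing is left."""
    if "anthropic-beta" in headers and exclude:
        kept = [b.strip() for b in headers["anthropic-beta"].split(",") if b.strip() not in exclude]
        if kept:
            headers["anthropic-beta"] = ",".join(kept)
        else:
            del headers["anthropic-beta"]
    return headers


# One of two beta values is excluded -> only the other survives
print(filter_anthropic_beta_header(
    {"anthropic-beta": "prompt-caching-2024-07-31,context-1m-2025-08-07"},
    ["context-1m-2025-08-07"],
))  # {'anthropic-beta': 'prompt-caching-2024-07-31'}

# The only value is excluded -> the header is dropped entirely
print(filter_anthropic_beta_header(
    {"anthropic-beta": "context-1m-2025-08-07"},
    ["context-1m-2025-08-07"],
))  # {}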
@@ -84,9 +84,14 @@ def transform_request(
         if tool_search_used:
             beta_set.add("tool-search-tool-2025-10-19")  # Vertex requires this header for tool search

+        # Filter out excluded anthropic beta values if configured
+        exclude_anthropic_beta_values = litellm_params.get("exclude_anthropic_beta_values") or []
+        if exclude_anthropic_beta_values:
+            beta_set = {b for b in beta_set if b not in exclude_anthropic_beta_values}
+
         if beta_set:
             data["anthropic_beta"] = list(beta_set)

         return data

     def transform_response(
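The transform_request change applies the same exclusion list to the beta values sent in the Vertex request body rather than in a header. A minimal sketch of that step, using made-up beta values and a placeholder payload:

exclude_anthropic_beta_values = ["context-1m-2025-08-07"]
beta_set = {"context-1m-2025-08-07", "tool-search-tool-2025-10-19"}

data = {"max_tokens": 1024}  # placeholder request payload, not the real transform output
if exclude_anthropic_beta_values:
    beta_set = {b for b in beta_set if b not in exclude_anthropic_beta_values}
if beta_set:
    # "anthropic_beta" is only attached when at least one value survives the filter
    data["anthropic_beta"] = list(beta_set)

print(data)  # {'max_tokens': 1024, 'anthropic_beta': ['tool-search-tool-2025-10-19']}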
4 changes: 1 addition & 3 deletions litellm/proxy/google_endpoints/endpoints.py

@@ -1,6 +1,4 @@
-from typing import Optional
-
-from fastapi import APIRouter, Depends, HTTPException, Query, Request, Response
+from fastapi import APIRouter, Depends, HTTPException, Request, Response
 from fastapi.responses import ORJSONResponse, StreamingResponse

 from litellm.proxy._types import *
5 changes: 5 additions & 0 deletions litellm/types/router.py

@@ -213,6 +213,9 @@ class GenericLiteLLMParams(CredentialLiteLLMParams, CustomPricingLiteLLMParams):
     vector_store_id: Optional[str] = None
     milvus_text_field: Optional[str] = None

+    # Anthropic Beta Header Filtering
+    exclude_anthropic_beta_values: Optional[List[str]] = None
+
     def __init__(
         self,
         custom_llm_provider: Optional[str] = None,
@@ -266,6 +269,8 @@ def __init__(
         s3_bucket_name: Optional[str] = None,
         s3_encryption_key_id: Optional[str] = None,
         gcs_bucket_name: Optional[str] = None,
+        # Anthropic Beta Header Filtering
+        exclude_anthropic_beta_values: Optional[List[str]] = None,
         **params,
     ):
         args = locals()
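With the new GenericLiteLLMParams field, the exclusion list should be configurable per deployment like any other litellm_params entry. A minimal sketch assuming a Router setup; the model name, project, and location below are placeholders:

from litellm import Router

router = Router(
    model_list=[
        {
            "model_name": "claude-sonnet-vertex",
            "litellm_params": {
                "model": "vertex_ai/claude-sonnet-4",   # placeholder model
                "vertex_project": "my-gcp-project",     # placeholder project
                "vertex_location": "us-east5",          # placeholder location
                # Beta values to strip from incoming or auto-added anthropic-beta headers
                "exclude_anthropic_beta_values": ["context-1m-2025-08-07"],
            },
        }
    ]
)

The same key should work under litellm_params in a proxy config.yaml deployment entry, since those entries map onto GenericLiteLLMParams.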
@@ -98,3 +98,132 @@ def test_web_search_header_not_added_without_tool():
     # Assert that the anthropic-beta header is NOT present when no web search tool
     assert "anthropic-beta" not in updated_headers, \
         "anthropic-beta header should not be present without web search tool"
+
+
+class TestExcludeAnthropicBetaValuesPassThrough:
+    """Tests for exclude_anthropic_beta_values in pass-through handler"""
+
+    def test_exclude_filters_existing_header_values(self):
+        """Test that exclude_anthropic_beta_values filters values from existing anthropic-beta header"""
+        config = VertexAIPartnerModelsAnthropicMessagesConfig()
+        headers = {"anthropic-beta": "prompt-caching-2024-07-31,context-1m-2025-08-07"}
+        litellm_params = {
+            "vertex_ai_project": "test-project",
+            "vertex_ai_location": "us-central1",
+            "vertex_credentials": "{}",
+            "exclude_anthropic_beta_values": ["context-1m-2025-08-07"],
+        }
+        optional_params = {}
+
+        with patch.object(
+            config, "_ensure_access_token", return_value=("token", "test-project")
+        ), patch.object(
+            config, "get_complete_vertex_url", return_value="https://mock-url"
+        ):
+            updated_headers, api_base = config.validate_anthropic_messages_environment(
+                headers=headers,
+                model="claude-sonnet-4",
+                messages=[],
+                optional_params=optional_params,
+                litellm_params=litellm_params,
+                api_base=None,
+            )
+
+        # context-1m-2025-08-07 should be filtered out
+        assert "anthropic-beta" in updated_headers
+        assert "context-1m-2025-08-07" not in updated_headers["anthropic-beta"]
+        assert "prompt-caching-2024-07-31" in updated_headers["anthropic-beta"]
+
+    def test_exclude_removes_header_when_all_values_filtered(self):
+        """Test that anthropic-beta header is removed when all values are excluded"""
+        config = VertexAIPartnerModelsAnthropicMessagesConfig()
+        headers = {"anthropic-beta": "context-1m-2025-08-07"}
+        litellm_params = {
+            "vertex_ai_project": "test-project",
+            "vertex_ai_location": "us-central1",
+            "vertex_credentials": "{}",
+            "exclude_anthropic_beta_values": ["context-1m-2025-08-07"],
+        }
+        optional_params = {}
+
+        with patch.object(
+            config, "_ensure_access_token", return_value=("token", "test-project")
+        ), patch.object(
+            config, "get_complete_vertex_url", return_value="https://mock-url"
+        ):
+            updated_headers, api_base = config.validate_anthropic_messages_environment(
+                headers=headers,
+                model="claude-sonnet-4",
+                messages=[],
+                optional_params=optional_params,
+                litellm_params=litellm_params,
+                api_base=None,
+            )
+
+        # anthropic-beta header should be removed entirely
+        assert "anthropic-beta" not in updated_headers
+
+    def test_exclude_prevents_auto_added_web_search_header(self):
+        """Test that exclude_anthropic_beta_values can prevent auto-added web search header"""
+        config = VertexAIPartnerModelsAnthropicMessagesConfig()
+        headers = {}
+        litellm_params = {
+            "vertex_ai_project": "test-project",
+            "vertex_ai_location": "us-central1",
+            "vertex_credentials": "{}",
+            "exclude_anthropic_beta_values": ["web-search-2025-03-05"],
+        }
+        # Include web search tool that would normally add the header
+        optional_params = {
+            "tools": [
+                {"type": "web_search_20250305", "name": "web_search", "max_uses": 5}
+            ]
+        }
+
+        with patch.object(
+            config, "_ensure_access_token", return_value=("token", "test-project")
+        ), patch.object(
+            config, "get_complete_vertex_url", return_value="https://mock-url"
+        ):
+            updated_headers, api_base = config.validate_anthropic_messages_environment(
+                headers=headers,
+                model="claude-sonnet-4",
+                messages=[],
+                optional_params=optional_params,
+                litellm_params=litellm_params,
+                api_base=None,
+            )
+
+        # web-search header should NOT be added because it's in the exclude list
+        assert "anthropic-beta" not in updated_headers
+
+    def test_empty_exclude_list_preserves_headers(self):
+        """Test that empty exclude list doesn't filter any headers"""
+        config = VertexAIPartnerModelsAnthropicMessagesConfig()
+        headers = {"anthropic-beta": "prompt-caching-2024-07-31,context-1m-2025-08-07"}
+        litellm_params = {
+            "vertex_ai_project": "test-project",
+            "vertex_ai_location": "us-central1",
+            "vertex_credentials": "{}",
+            "exclude_anthropic_beta_values": [],
+        }
+        optional_params = {}
+
+        with patch.object(
+            config, "_ensure_access_token", return_value=("token", "test-project")
+        ), patch.object(
+            config, "get_complete_vertex_url", return_value="https://mock-url"
+        ):
+            updated_headers, api_base = config.validate_anthropic_messages_environment(
+                headers=headers,
+                model="claude-sonnet-4",
+                messages=[],
+                optional_params=optional_params,
+                litellm_params=litellm_params,
+                api_base=None,
+            )
+
+        # All values should be preserved
+        assert "anthropic-beta" in updated_headers
+        assert "prompt-caching-2024-07-31" in updated_headers["anthropic-beta"]
+        assert "context-1m-2025-08-07" in updated_headers["anthropic-beta"]