-
-
Notifications
You must be signed in to change notification settings - Fork 5.1k
Description
What happened?
A bug happened!
litellm/proxy/litellm_pre_call_utils.py
def move_guardrails_to_metadata(
    data: dict,
    _metadata_variable_name: str,
    user_api_key_dict: UserAPIKeyAuth,
):
    """
    Helper to add guardrails from the request to the request metadata.

    - If guardrails are set on the API key / team metadata, they are copied
      onto the request metadata first.
    - "guardrails" and "guardrail_config" sent in the request body are then
      moved into ``data[_metadata_variable_name]``, merging with any values
      already present, since downstream logic reads them from metadata.
    """
    # Check key-level / team-level guardrails first.
    _add_guardrails_from_key_or_team_metadata(
        key_metadata=user_api_key_dict.metadata,
        team_metadata=user_api_key_dict.team_metadata,
        data=data,
        metadata_variable_name=_metadata_variable_name,
    )

    # The original code indexed data[_metadata_variable_name] directly and
    # raised KeyError when the metadata dict was absent; create it defensively.
    metadata = data.setdefault(_metadata_variable_name, {})

    #########################################################################################
    # Users might send "guardrails" in the request body; we move them into the
    # request metadata because downstream logic requires them to be there.
    #########################################################################################
    if "guardrails" in data:
        request_body_guardrails = data.pop("guardrails")
        existing_guardrails = metadata.get("guardrails")
        if isinstance(existing_guardrails, list):
            # Guard against callers (e.g. custom pre-call hooks) that place the
            # SAME list object at both the root and the metadata level:
            # list.extend(itself) would duplicate every entry.
            if existing_guardrails is not request_body_guardrails:
                existing_guardrails.extend(request_body_guardrails)
        else:
            metadata["guardrails"] = request_body_guardrails
    #########################################################################################
    # Same treatment for "guardrail_config" (a dict, merged via update()).
    if "guardrail_config" in data:
        request_body_guardrail_config = data.pop("guardrail_config")
        existing_config = metadata.get("guardrail_config")
        if isinstance(existing_config, dict):
            existing_config.update(request_body_guardrail_config)
        else:
            metadata["guardrail_config"] = request_body_guardrail_config
litellm/integrations/custom_guardrail.py
def get_guardrail_from_metadata(
    self, data: dict
) -> Union[List[str], List[Dict[str, DynamicGuardrailParams]]]:
    """
    Resolve which guardrail(s) should run for this request.

    A root-level "guardrails" entry wins; otherwise fall back to the
    request metadata ("litellm_metadata" preferred over "metadata"),
    returning an empty list when nothing is configured.
    """
    try:
        # Root-level guardrails take precedence when present.
        return data["guardrails"]
    except KeyError:
        pass
    request_metadata = data.get("litellm_metadata") or data.get("metadata", {})
    return request_metadata.get("guardrails") or []
This is my config.yaml
environment_variables:
PRESIDIO_ANALYZER_API_BASE: ""
PRESIDIO_ANONYMIZER_API_BASE: ""
model_list:
model_name: bedrock-claude-4-sonnet
litellm_params:
model_params
litellm_settings:
drop_params: True
callbacks: ["presidio", "custom_callback.proxy_handler_instance"]
guardrails:
guardrail_name: "presidio-pii-guard"
litellm_params:
guardrail: presidio
mode: "pre_call" # Run before LLM call
default_on: true
pii_entities_config:
EMAIL_ADDRESS: "MASK"
PHONE_NUMBER: "MASK"
PERSON: "MASK"
CREDIT_CARD: "MASK"
MEDICAL_LICENSE: "MASK"
US_BANK_NUMBER: "MASK"
US_DRIVER_LICENSE: "MASK"
US_ITIN: "MASK"
US_PASSPORT: "MASK"
US_SSN: "MASK"
UK_NHS: "MASK"
UK_NINO: "MASK"
This is my custom_callback.proxy_handler_instance
class MyCustomHandler(CustomLogger):
    """Proxy pre-call hook that reads the incoming HTTP headers and

    - copies ``x-litellm-session-id`` into ``data["litellm_session_id"]``
    - merges ``x-litellm-guardrails`` (comma-separated names) into
      ``data["metadata"]["guardrails"]`` and mirrors them at
      ``data["guardrails"]``.
    """

    @staticmethod
    def _get_request_headers(data: dict) -> dict:
        """Locate request headers on proxy_server_request, falling back to metadata["headers"]."""
        headers = {}
        if "proxy_server_request" in data:
            headers = data["proxy_server_request"].get("headers", {})
        if not headers and "metadata" in data:
            headers = data.get("metadata", {}).get("headers", {})
        return headers

    @staticmethod
    def _get_header(headers: dict, name: str):
        """Case-insensitive header lookup (HTTP header names are case-insensitive)."""
        wanted = name.lower()
        value = None
        for key, val in headers.items():
            if key.lower() == wanted:
                value = val
        return value

    async def async_pre_call_hook(
        self,
        user_api_key_dict: UserAPIKeyAuth,
        cache: DualCache,
        data: dict,
        call_type: Literal["completion", "text_completion", "embeddings", "image_generation", "moderation", "audio_transcription", "pass_through_endpoint", "rerank"],
    ) -> Optional[dict]:
        """Extract session_id and guardrail names from request headers; returns the mutated data dict."""
        headers = self._get_request_headers(data)

        # --- session id ---------------------------------------------------
        session_id = self._get_header(headers, "x-litellm-session-id")
        if session_id:
            # Ensure the metadata dict exists before downstream code uses it.
            data.setdefault("metadata", {})
            data["litellm_session_id"] = session_id
            print(f"[SessionIdExtractor] Set session_id: {session_id}")

        # --- guardrails ---------------------------------------------------
        # NOTE: reuse the same header source as above (the original re-read
        # only proxy_server_request here, silently dropping the metadata
        # fallback, and used a case-sensitive lookup).
        guardrails_header = self._get_header(headers, "x-litellm-guardrails")
        if guardrails_header:
            # Parse the comma-separated guardrail names.
            guardrail_names = [g.strip() for g in guardrails_header.split(",")]
            data.setdefault("metadata", {})
            # Merge with any guardrails already present, preserving order and
            # skipping duplicates.
            existing_guardrails = data["metadata"].get("guardrails", [])
            if isinstance(existing_guardrails, list):
                for name in guardrail_names:
                    if name not in existing_guardrails:
                        existing_guardrails.append(name)
            data["metadata"]["guardrails"] = existing_guardrails
            # Mirror at the root with a COPY: downstream litellm code pops
            # data["guardrails"] and extends the metadata list with it, so
            # sharing one list object would duplicate every entry.
            data["guardrails"] = list(existing_guardrails)
        print("async_pre_call_hook", data)
        return data
# Module-level singleton referenced from config.yaml via
# callbacks: ["custom_callback.proxy_handler_instance"].
proxy_handler_instance = MyCustomHandler()
I can see that the guardrails are being set in `data['guardrails']` by the proxy handler above, but I get the following error when the guardrails run.
Exception: TypedDict does not support instance and class checks
Traceback: Traceback (most recent call last):
File "/workspaces/code/litellm/env/lib/python3.12/site-packages/litellm/utils.py", line 1527, in wrapper_async
result = await async_post_call_success_deployment_hook(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/workspaces/code/litellm/env/lib/python3.12/site-packages/litellm/utils.py", line 969, in async_post_call_success_deployment_hook
result = await callback.async_post_call_success_deployment_hook(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/workspaces/code/litellm/env/lib/python3.12/site-packages/litellm/integrations/custom_guardrail.py", line 250, in async_post_call_success_deployment_hook
if result is None or not isinstance(result, get_args(LLMResponseTypes)):
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/workspaces/code/litellm/env/lib/python3.12/site-packages/typing_extensions.py", line 1247, in subclasscheck
raise TypeError('TypedDict does not support instance and class checks')
TypeError: TypedDict does not support instance and class checks
Relevant log output
What part of LiteLLM is this about?
Proxy
What LiteLLM version are you on ?
v1.80.7