Skip to content

Morph Fast Apply + Embeddings + Rerank #10970

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 2 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions litellm/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -434,6 +434,7 @@ def identify(event_details):
codestral_models: List = []
friendliai_models: List = []
featherless_ai_models: List = []
morph_models: List = []
palm_models: List = []
groq_models: List = []
azure_models: List = []
Expand Down Expand Up @@ -611,6 +612,8 @@ def add_known_models():
snowflake_models.append(key)
elif value.get("litellm_provider") == "featherless_ai":
featherless_ai_models.append(key)
elif value.get("litellm_provider") == "morph":
morph_models.append(key)


add_known_models()
Expand Down Expand Up @@ -691,6 +694,7 @@ def add_known_models():
+ llama_models
+ featherless_ai_models
+ nscale_models
+ morph_models
)

model_list_set = set(model_list)
Expand Down Expand Up @@ -752,6 +756,7 @@ def add_known_models():
"meta_llama": llama_models,
"nscale": nscale_models,
"featherless_ai": featherless_ai_models,
"morph": morph_models,
}

# mapping for those models which have larger equivalents
Expand Down Expand Up @@ -852,6 +857,9 @@ def add_known_models():
from .llms.galadriel.chat.transformation import GaladrielChatConfig
from .llms.github.chat.transformation import GithubChatConfig
from .llms.empower.chat.transformation import EmpowerChatConfig
from .llms.morph.chat.transformation import MorphChatConfig
from .llms.morph.embedding.transformation import MorphEmbeddingConfig
from .llms.morph.rerank.transformation import MorphRerankConfig
from .llms.huggingface.chat.transformation import HuggingFaceChatConfig
from .llms.huggingface.embedding.transformation import HuggingFaceEmbeddingConfig
from .llms.oobabooga.chat.transformation import OobaboogaConfig
Expand Down
9 changes: 9 additions & 0 deletions litellm/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -225,6 +225,7 @@
"meta_llama",
"featherless_ai",
"nscale",
"morph",
]

LITELLM_EMBEDDING_PROVIDERS_SUPPORTING_INPUT_ARRAY_OF_TOKENS = [
Expand Down Expand Up @@ -295,6 +296,7 @@
"api.llama.com/compat/v1/",
"api.featherless.ai/v1",
"inference.api.nscale.com/v1",
"api.morphllm.com/v1",
]


Expand Down Expand Up @@ -326,6 +328,7 @@
"lm_studio",
"galadriel",
"novita",
"morph",
"meta_llama",
"featherless_ai",
"nscale",
Expand Down Expand Up @@ -496,6 +499,12 @@
"ProdeusUnity/Stellar-Odyssey-12b-v0.0",
]

morph_models: List = [
"morph-v2",
"morph-embedding-v2",
"morph-rerank-v2",
]

BEDROCK_INVOKE_PROVIDERS_LITERAL = Literal[
"cohere",
"anthropic",
Expand Down
10 changes: 10 additions & 0 deletions litellm/litellm_core_utils/get_llm_provider_logic.py
Original file line number Diff line number Diff line change
Expand Up @@ -228,6 +228,9 @@ def get_llm_provider( # noqa: PLR0915
elif endpoint == "https://api.featherless.ai/v1":
custom_llm_provider = "featherless_ai"
dynamic_api_key = get_secret_str("FEATHERLESS_AI_API_KEY")
elif endpoint == "https://api.morphllm.com/v1":
custom_llm_provider = "morph"
dynamic_api_key = api_key or get_secret_str("MORPH_API_KEY")
elif endpoint == litellm.NscaleConfig.API_BASE_URL:
custom_llm_provider = "nscale"
dynamic_api_key = litellm.NscaleConfig.get_api_key()
Expand Down Expand Up @@ -628,6 +631,13 @@ def _get_openai_compatible_provider_info( # noqa: PLR0915
) = litellm.FeatherlessAIConfig()._get_openai_compatible_provider_info(
api_base, api_key
)
elif custom_llm_provider == "morph":
(
api_base,
dynamic_api_key,
) = litellm.MorphChatConfig()._get_openai_compatible_provider_info(
api_base, api_key
)
elif custom_llm_provider == "nscale":
(
api_base,
Expand Down
Empty file added litellm/llms/morph/__init__.py
Empty file.
Empty file.
9 changes: 9 additions & 0 deletions litellm/llms/morph/chat/transformation.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
"""
Translate from OpenAI's `/v1/chat/completions` to Morph's `/v1/chat/completions`
"""

from ...openai_like.chat.transformation import OpenAILikeChatConfig


class MorphChatConfig(OpenAILikeChatConfig):
    # Morph exposes an OpenAI-compatible `/v1/chat/completions` endpoint,
    # so the generic OpenAI-like chat transformation is reused unchanged.
    pass
Empty file.
134 changes: 134 additions & 0 deletions litellm/llms/morph/embedding/transformation.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,134 @@
"""
Translate from OpenAI's `/v1/embeddings` to Morph's `/v1/embeddings`
"""

from typing import Any, Dict, List, Optional, Tuple, Union

import httpx

from litellm.llms.base_llm.chat.transformation import BaseLLMException
from litellm.llms.base_llm.embedding.transformation import BaseEmbeddingConfig
from litellm.secret_managers.main import get_secret_str
from litellm.types.llms.openai import AllMessageValues
from litellm.types.utils import EmbeddingResponse


class MorphError(BaseLLMException):
    """Raised when the Morph API returns an error response."""


class MorphEmbeddingConfig(BaseEmbeddingConfig):
    """
    Reference: https://docs.morphllm.com/api-reference/endpoint/embeddings

    Morph provides an OpenAI-compatible embeddings API, so request/response
    shapes pass through with minimal transformation.
    """

    def __init__(self) -> None:
        pass

    def _get_openai_compatible_provider_info(
        self, api_base: Optional[str], api_key: Optional[str]
    ) -> Tuple[Optional[str], Optional[str]]:
        """Resolve the API base and key for Morph.

        Precedence: explicit argument, then the MORPH_API_BASE /
        MORPH_API_KEY environment secrets, then Morph's public endpoint.
        """
        api_base = (
            api_base
            or get_secret_str("MORPH_API_BASE")
            or "https://api.morphllm.com/v1"
        )
        dynamic_api_key = api_key or get_secret_str("MORPH_API_KEY")
        return api_base, dynamic_api_key

    def get_supported_openai_params(self, model: str) -> List[str]:
        """Return the *optional* OpenAI params Morph embeddings accept.

        ``model`` and ``input`` are required parameters, not optional ones,
        so they are deliberately excluded from this list.
        """
        return [
            "dimensions",
            "encoding_format",
            "user",
        ]

    def map_openai_params(
        self,
        non_default_params: Dict[str, Any],
        optional_params: Dict[str, Any],
        model: str,
        drop_params: bool = False,
    ) -> Dict[str, Any]:
        """Copy supported OpenAI optional params into ``optional_params``.

        Unsupported params are silently ignored (Morph is OpenAI-compatible,
        so no renaming is needed).
        """
        supported_params = self.get_supported_openai_params(model)
        for param, value in non_default_params.items():
            if param in supported_params:
                optional_params[param] = value
        return optional_params

    def transform_embedding_request(
        self,
        model: str,
        input: Union[str, List[str]],
        optional_params: Dict[str, Any],
    ) -> Dict[str, Any]:
        """Build the Morph `/v1/embeddings` request body.

        Strips the ``morph/`` provider prefix from the model name and
        forwards any supported optional params.
        """
        request_data = {
            "model": model.replace("morph/", ""),
            "input": input,
        }

        # Forward only the optional params Morph supports; "model" and
        # "input" are set explicitly above and must not be overwritten.
        for param in self.get_supported_openai_params(model):
            if param in optional_params and param not in ["model", "input"]:
                request_data[param] = optional_params[param]

        return request_data

    def transform_embedding_response(
        self,
        model: str,
        response: httpx.Response,
        embedding_response: EmbeddingResponse,
    ) -> EmbeddingResponse:
        """Parse the Morph embeddings response.

        Morph follows the OpenAI response format, so the JSON body maps
        directly onto ``EmbeddingResponse``.

        Raises:
            MorphError: if the body is not valid JSON or does not match the
                expected schema.
        """
        try:
            response_json = response.json()
            return EmbeddingResponse(**response_json)
        except Exception:
            raise MorphError(
                message=response.text,
                status_code=response.status_code,
            )

    def validate_environment(
        self,
        headers: Dict[str, Any],
        model: str,
        messages: Optional[List[AllMessageValues]] = None,
        optional_params: Optional[Dict[str, Any]] = None,
        litellm_params: Optional[Dict[str, Any]] = None,
        api_key: Optional[str] = None,
        api_base: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Attach auth headers, resolving the key from MORPH_API_KEY if needed.

        Raises:
            ValueError: if no API key is provided and MORPH_API_KEY is unset.
        """
        if optional_params is None:
            optional_params = {}

        if api_key is None:
            api_key = get_secret_str("MORPH_API_KEY")

        if api_key is None:
            raise ValueError(
                "Morph API key is required. Please set 'MORPH_API_KEY' environment variable."
            )

        headers["Authorization"] = f"Bearer {api_key}"
        headers["Content-Type"] = "application/json"

        return headers

    def get_error_class(
        self,
        error_message: str,
        status_code: int,
        headers: Union[Dict[str, Any], httpx.Headers],
    ) -> BaseLLMException:
        """Return the Morph-specific exception for an error response."""
        return MorphError(message=error_message, status_code=status_code)
Empty file.
Loading
Loading