Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 16 additions & 1 deletion .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ GOOGLE_API_KEY=your-google-api-key-here
MISTRAL_API_KEY=your-mistral-api-key-here

# Hugging Face
HUGGINGFACE_TOKEN=your-huggingface-token-here
HF_TOKEN=your-huggingface-token-here

# Stability AI
STABILITYAI_API_KEY=your-stabilityai-api-key-here
Expand All @@ -31,6 +31,21 @@ COHERE_API_KEY=your-cohere-api-key-here
# xAI
XAI_API_KEY=your-xai-api-key-here

# BytePlus
BYTEPLUS_API_KEY=your-byteplus-api-key-here

# ElevenLabs
ELEVENLABS_API_KEY=your-elevenlabs-api-key-here

# Groq
GROQ_API_KEY=your-groq-api-key-here

# Moonshot
MOONSHOT_API_KEY=your-moonshot-api-key-here

# DeepSeek
DEEPSEEK_API_KEY=your-deepseek-api-key-here

# Luma
LUMA_API_KEY=your-luma-api-key-here

Expand Down
1 change: 1 addition & 0 deletions .github/workflows/publish.yml
Original file line number Diff line number Diff line change
Expand Up @@ -90,6 +90,7 @@ jobs:
GROQ_API_KEY: ${{ secrets.GROQ_API_KEY }}
MOONSHOT_API_KEY: ${{ secrets.MOONSHOT_API_KEY }}
DEEPSEEK_API_KEY: ${{ secrets.DEEPSEEK_API_KEY }}
HF_TOKEN: ${{ secrets.HF_TOKEN }}
run: |
uv run pytest "$TEST_FILE" \
-m integration -v \
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[project]
name = "celeste-ai"
version = "0.10.1"
version = "0.10.2"
description = "Open source, type-safe primitives for multi-modal AI. All capabilities, all providers, one interface"
authors = [{name = "Kamilbenkirane", email = "kamil@withceleste.ai"}]
readme = "README.md"
Expand Down
2 changes: 1 addition & 1 deletion src/celeste/credentials.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ class Credentials(BaseSettings):
google_api_key: SecretStr | None = Field(None, alias="GOOGLE_API_KEY")
mistral_api_key: SecretStr | None = Field(None, alias="MISTRAL_API_KEY")
moonshot_api_key: SecretStr | None = Field(None, alias="MOONSHOT_API_KEY")
huggingface_token: SecretStr | None = Field(None, alias="HUGGINGFACE_TOKEN")
hf_token: SecretStr | None = Field(None, alias="HF_TOKEN")
stabilityai_api_key: SecretStr | None = Field(None, alias="STABILITYAI_API_KEY")
replicate_api_token: SecretStr | None = Field(None, alias="REPLICATE_API_TOKEN")
cohere_api_key: SecretStr | None = Field(None, alias="COHERE_API_KEY")
Expand Down
2 changes: 2 additions & 0 deletions src/celeste/modalities/text/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
from .providers.deepseek.models import MODELS as DEEPSEEK_MODELS
from .providers.google.models import MODELS as GOOGLE_MODELS
from .providers.groq.models import MODELS as GROQ_MODELS
from .providers.huggingface.models import MODELS as HUGGINGFACE_MODELS
from .providers.mistral.models import MODELS as MISTRAL_MODELS
from .providers.moonshot.models import MODELS as MOONSHOT_MODELS
from .providers.ollama.models import MODELS as OLLAMA_MODELS
Expand All @@ -19,6 +20,7 @@
*DEEPSEEK_MODELS,
*GOOGLE_MODELS,
*GROQ_MODELS,
*HUGGINGFACE_MODELS,
*OLLAMA_MODELS,
*MISTRAL_MODELS,
*MOONSHOT_MODELS,
Expand Down
2 changes: 2 additions & 0 deletions src/celeste/modalities/text/providers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
from .deepseek import DeepSeekTextClient
from .google import GoogleTextClient
from .groq import GroqTextClient
from .huggingface import HuggingFaceTextClient
from .mistral import MistralTextClient
from .moonshot import MoonshotTextClient
from .ollama import OllamaTextClient
Expand All @@ -21,6 +22,7 @@
Provider.DEEPSEEK: DeepSeekTextClient,
Provider.GOOGLE: GoogleTextClient,
Provider.GROQ: GroqTextClient,
Provider.HUGGINGFACE: HuggingFaceTextClient,
Provider.OLLAMA: OllamaTextClient,
Provider.OPENRESPONSES: OpenResponsesTextClient,
Provider.MISTRAL: MistralTextClient,
Expand Down
6 changes: 6 additions & 0 deletions src/celeste/modalities/text/providers/huggingface/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
"""HuggingFace provider for text modality."""

from .client import HuggingFaceTextClient
from .models import MODELS

__all__ = ["MODELS", "HuggingFaceTextClient"]
96 changes: 96 additions & 0 deletions src/celeste/modalities/text/providers/huggingface/client.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,96 @@
"""HuggingFace text client (modality)."""

from typing import Any, Unpack

from celeste.parameters import ParameterMapper
from celeste.providers.huggingface.chat.client import HuggingFaceChatClient
from celeste.providers.huggingface.chat.streaming import (
HuggingFaceChatStream as _HuggingFaceChatStream,
)
from celeste.types import ImageContent, Message, TextContent, VideoContent
from celeste.utils import build_image_data_url

from ...client import TextClient
from ...io import (
TextInput,
TextOutput,
)
from ...parameters import TextParameters
from ...streaming import TextStream
from .parameters import HUGGINGFACE_PARAMETER_MAPPERS


class HuggingFaceTextStream(_HuggingFaceChatStream, TextStream):
    """HuggingFace streaming for text modality.

    Combines the provider-level chat streaming implementation with the
    text-modality stream interface; no members are overridden.
    """


class HuggingFaceTextClient(HuggingFaceChatClient, TextClient):
    """HuggingFace client for the text modality.

    Combines the shared HuggingFace chat transport with the text-modality
    client interface, adding prompt/message request construction and
    response parsing.
    """

    @classmethod
    def parameter_mappers(cls) -> list[ParameterMapper]:
        """Return the mappers that translate text parameters for HuggingFace."""
        return HUGGINGFACE_PARAMETER_MAPPERS

    async def generate(
        self,
        prompt: str | None = None,
        *,
        messages: list[Message] | None = None,
        **parameters: Unpack[TextParameters],
    ) -> TextOutput:
        """Generate text from a prompt or an explicit message history."""
        request_inputs = TextInput(prompt=prompt, messages=messages)
        return await self._predict(request_inputs, **parameters)

    async def analyze(
        self,
        prompt: str | None = None,
        *,
        messages: list[Message] | None = None,
        image: ImageContent | None = None,
        video: VideoContent | None = None,
        **parameters: Unpack[TextParameters],
    ) -> TextOutput:
        """Analyze image(s) or video(s) together with a prompt or messages."""
        request_inputs = TextInput(
            prompt=prompt, messages=messages, image=image, video=video
        )
        return await self._predict(request_inputs, **parameters)

    def _init_request(self, inputs: TextInput) -> dict[str, Any]:
        """Build the request payload in HuggingFace's chat messages format."""
        # Explicit messages take precedence over the prompt/image fallback.
        if inputs.messages is not None:
            dumped = [msg.model_dump() for msg in inputs.messages]
            return {"messages": dumped}

        prompt_text = inputs.prompt or ""

        # Text-only request: content is just the prompt string.
        if inputs.image is None:
            return {"messages": [{"role": "user", "content": prompt_text}]}

        # Multimodal request: one image_url part per image, then the text part.
        image_list = inputs.image if isinstance(inputs.image, list) else [inputs.image]
        parts: list[dict[str, Any]] = [
            {
                "type": "image_url",
                "image_url": {"url": build_image_data_url(img)},
            }
            for img in image_list
        ]
        parts.append({"type": "text", "text": prompt_text})
        return {"messages": [{"role": "user", "content": parts}]}

    def _parse_content(
        self,
        response_data: dict[str, Any],
        **parameters: Unpack[TextParameters],
    ) -> TextContent:
        """Extract the assistant message text from the raw response choices."""
        choices = super()._parse_content(response_data)
        first_message = choices[0].get("message", {})
        # `or ""` also normalizes an explicit null content to empty text.
        raw_text = first_message.get("content") or ""
        return self._transform_output(raw_text, **parameters)

    def _stream_class(self) -> type[TextStream]:
        """Return the Stream class used for this provider's streaming."""
        return HuggingFaceTextStream


__all__ = ["HuggingFaceTextClient", "HuggingFaceTextStream"]
35 changes: 35 additions & 0 deletions src/celeste/modalities/text/providers/huggingface/models.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
"""HuggingFace models for text modality."""

from celeste.constraints import ImagesConstraint, Range, Schema
from celeste.core import Modality, Operation, Parameter, Provider
from celeste.models import Model

from ...parameters import TextParameter

# Registry of HuggingFace router models exposed through the text modality.
MODELS: list[Model] = [
    Model(
        id="Qwen/Qwen3-4B-Instruct-2507",
        provider=Provider.HUGGINGFACE,
        display_name="Qwen 3 4B Instruct",
        # Text-only: supports GENERATE but not image/video ANALYZE.
        operations={Modality.TEXT: {Operation.GENERATE}},
        streaming=True,
        parameter_constraints={
            Parameter.TEMPERATURE: Range(min=0.0, max=2.0, step=0.01),
            Parameter.MAX_TOKENS: Range(min=1, max=32768, step=1),
            TextParameter.OUTPUT_SCHEMA: Schema(),
        },
    ),
    Model(
        id="google/gemma-3n-E4B-it",
        provider=Provider.HUGGINGFACE,
        display_name="Gemma 3n E4B IT",
        # Vision-capable: ANALYZE plus the IMAGE constraint below enable image input.
        operations={Modality.TEXT: {Operation.GENERATE, Operation.ANALYZE}},
        streaming=True,
        parameter_constraints={
            Parameter.TEMPERATURE: Range(min=0.0, max=2.0, step=0.01),
            Parameter.MAX_TOKENS: Range(min=1, max=32768, step=1),
            TextParameter.OUTPUT_SCHEMA: Schema(),
            TextParameter.IMAGE: ImagesConstraint(),
        },
    ),
]
41 changes: 41 additions & 0 deletions src/celeste/modalities/text/providers/huggingface/parameters.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
"""HuggingFace parameter mappers for text."""

from celeste.parameters import ParameterMapper
from celeste.protocols.chatcompletions.parameters import (
MaxTokensMapper as _MaxTokensMapper,
)
from celeste.protocols.chatcompletions.parameters import (
TemperatureMapper as _TemperatureMapper,
)
from celeste.providers.huggingface.chat.parameters import (
ResponseFormatMapper as _ResponseFormatMapper,
)

from ...parameters import TextParameter


class TemperatureMapper(_TemperatureMapper):
    """Map temperature to HuggingFace's temperature parameter."""

    # Only the modality-level parameter name differs from the shared mapper.
    name = TextParameter.TEMPERATURE


class MaxTokensMapper(_MaxTokensMapper):
    """Map max_tokens to HuggingFace's max_tokens parameter."""

    # Only the modality-level parameter name differs from the shared mapper.
    name = TextParameter.MAX_TOKENS


class OutputSchemaMapper(_ResponseFormatMapper):
    """Map output_schema to HuggingFace's response_format parameter."""

    # Rebinds the provider response_format mapper under the text-modality name.
    name = TextParameter.OUTPUT_SCHEMA


# Mapper instances used by HuggingFaceTextClient.parameter_mappers() to
# translate text-modality parameters into the chat-completions request body.
HUGGINGFACE_PARAMETER_MAPPERS: list[ParameterMapper] = [
    TemperatureMapper(),
    MaxTokensMapper(),
    OutputSchemaMapper(),
]

__all__ = ["HUGGINGFACE_PARAMETER_MAPPERS"]
2 changes: 2 additions & 0 deletions src/celeste/providers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
google,
gradium,
groq,
huggingface,
mistral,
moonshot,
ollama,
Expand All @@ -27,6 +28,7 @@
"google",
"gradium",
"groq",
"huggingface",
"mistral",
"moonshot",
"ollama",
Expand Down
11 changes: 11 additions & 0 deletions src/celeste/providers/huggingface/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
"""HuggingFace provider package for Celeste AI."""

from celeste.core import Provider
from celeste.credentials import register_auth

# Import-time side effect: registers the HF_TOKEN credential so HuggingFace
# requests carry it as "Authorization: Bearer <token>".
register_auth(  # nosec B106 - env var name, not hardcoded password
    provider=Provider.HUGGINGFACE,
    secret_name="HF_TOKEN",
    header="Authorization",
    prefix="Bearer ",
)
1 change: 1 addition & 0 deletions src/celeste/providers/huggingface/chat/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
"""HuggingFace Chat API provider package."""
24 changes: 24 additions & 0 deletions src/celeste/providers/huggingface/chat/client.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
"""HuggingFace Chat API client mixin."""

from typing import ClassVar

from celeste.protocols.chatcompletions import ChatCompletionsClient

from . import config


class HuggingFaceChatClient(ChatCompletionsClient):
    """Mixin for HuggingFace Chat API capabilities.

    Inherits the shared Chat Completions implementation. Only overrides:
    - _default_base_url: the HuggingFace router base URL
    - _default_endpoint: HuggingFace uses /v1/chat/completions
    """

    _default_base_url: ClassVar[str] = config.BASE_URL
    # A StrEnum member is a str, so it satisfies the ClassVar[str] annotation.
    _default_endpoint: ClassVar[str] = (
        config.HuggingFaceChatEndpoint.CREATE_CHAT_COMPLETION
    )


__all__ = ["HuggingFaceChatClient"]
13 changes: 13 additions & 0 deletions src/celeste/providers/huggingface/chat/config.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
"""Configuration for HuggingFace Chat API."""

from enum import StrEnum


class HuggingFaceChatEndpoint(StrEnum):
    """Endpoints for HuggingFace Chat API.

    Members are URL path suffixes appended to the provider base URL.
    """

    CREATE_CHAT_COMPLETION = "/v1/chat/completions"
    LIST_MODELS = "/v1/models"


BASE_URL = "https://router.huggingface.co"
Loading
Loading