Skip to content

Commit

Permalink
Fix: use other chat completion providers (#2153)
Browse files Browse the repository at this point in the history
* Fix: use other LLMs providers for chat completion

* lint
  • Loading branch information
Wauplin authored Mar 25, 2024
1 parent 59be8f2 commit 9e13b83
Show file tree
Hide file tree
Showing 2 changed files with 6 additions and 2 deletions.
4 changes: 3 additions & 1 deletion src/huggingface_hub/inference/_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -548,7 +548,9 @@ def chat_completion(
if _is_chat_completion_server(model):
# First, let's consider the server has a `/v1/chat/completions` endpoint.
# If that's the case, we don't have to render the chat template client-side.
model_url = self._resolve_url(model) + "/v1/chat/completions"
model_url = self._resolve_url(model)
if not model_url.endswith("/chat/completions"):
model_url += "/v1/chat/completions"

try:
data = self.post(
Expand Down
4 changes: 3 additions & 1 deletion src/huggingface_hub/inference/_generated/_async_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -548,7 +548,9 @@ async def chat_completion(
if _is_chat_completion_server(model):
# First, let's consider the server has a `/v1/chat/completions` endpoint.
# If that's the case, we don't have to render the chat template client-side.
model_url = self._resolve_url(model) + "/v1/chat/completions"
model_url = self._resolve_url(model)
if not model_url.endswith("/chat/completions"):
model_url += "/v1/chat/completions"

try:
data = await self.post(
Expand Down

0 comments on commit 9e13b83

Please sign in to comment.