Skip to content
This repository was archived by the owner on Jun 5, 2025. It is now read-only.

Commit 270aeec

Browse files
Fix FIM for OpenRouter
FIM was not working with OpenRouter in Continue. The reason was that we get FIM requests in `/completions`. LiteLLM, when using `acompletion`, forces `/chat/completions` and the return format `{..., "choices":[{"delta":{"content":"some text"}}]}`. However, Continue was expecting the format `{..., "choices":[{"text":"some text"}]}` because of the endpoint it called. With this PR we force the return format to be the latter by using `atext_completion` from LiteLLM.
1 parent 609ae50 commit 270aeec

File tree

3 files changed

+14
-13
lines changed

3 files changed

+14
-13
lines changed

src/codegate/providers/litellmshim/generators.py

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -16,20 +16,13 @@ async def sse_stream_generator(stream: AsyncIterator[Any]) -> AsyncIterator[str]
1616
# alternatively we might want to just dump the whole object
1717
# this might even allow us to tighten the typing of the stream
1818
chunk = chunk.model_dump_json(exclude_none=True, exclude_unset=True)
19-
# chunk_dict = json.loads(chunk)
20-
# chunk_dict["system_fingerprint"] = "fp_00428b782a"
21-
# chunk = json.dumps(chunk_dict)
2219
try:
23-
print(f"===============\ndata: {chunk}\n\n")
2420
yield f"data: {chunk}\n\n"
2521
except Exception as e:
26-
print(f"===============\ndata: {str(e)}\n\n")
2722
yield f"data: {str(e)}\n\n"
2823
except Exception as e:
29-
print(f"===============\ndata: {str(e)}\n\n")
3024
yield f"data: {str(e)}\n\n"
3125
finally:
32-
print(f"===============\ndata: [DONE]\n\n")
3326
yield "data: [DONE]\n\n"
3427

3528

src/codegate/providers/openai/provider.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,8 +18,9 @@ class OpenAIProvider(BaseProvider):
1818
def __init__(
1919
self,
2020
pipeline_factory: PipelineFactory,
21+
# Enable receiving other completion handlers from children, i.e. OpenRouter and LM Studio
22+
completion_handler: LiteLLmShim = LiteLLmShim(stream_generator=sse_stream_generator),
2123
):
22-
completion_handler = LiteLLmShim(stream_generator=sse_stream_generator)
2324
super().__init__(
2425
OpenAIInputNormalizer(),
2526
OpenAIOutputNormalizer(),

src/codegate/providers/openrouter/provider.py

Lines changed: 12 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -2,12 +2,14 @@
22
from typing import Dict
33

44
from fastapi import Header, HTTPException, Request
5+
from litellm import atext_completion
56
from litellm.types.llms.openai import ChatCompletionRequest
67

78
from codegate.clients.clients import ClientType
89
from codegate.clients.detector import DetectClient
910
from codegate.pipeline.factory import PipelineFactory
1011
from codegate.providers.fim_analyzer import FIMAnalyzer
12+
from codegate.providers.litellmshim import LiteLLmShim, sse_stream_generator
1113
from codegate.providers.normalizer.completion import CompletionNormalizer
1214
from codegate.providers.openai import OpenAIProvider
1315

@@ -20,15 +22,20 @@ def normalize(self, data: Dict) -> ChatCompletionRequest:
2022
return super().normalize(data)
2123

2224
def denormalize(self, data: ChatCompletionRequest) -> Dict:
23-
if data.get("had_prompt_before", False):
24-
del data["had_prompt_before"]
25-
26-
return data
25+
return super().denormalize(data)
2726

2827

2928
class OpenRouterProvider(OpenAIProvider):
3029
def __init__(self, pipeline_factory: PipelineFactory):
31-
super().__init__(pipeline_factory)
30+
super().__init__(
31+
pipeline_factory,
32+
# We get FIM requests in /completions. LiteLLM is forcing /chat/completions
33+
# which returns "choices":[{"delta":{"content":"some text"}}]
34+
# instead of "choices":[{"text":"some text"}] expected by the client (Continue)
35+
completion_handler=LiteLLmShim(
36+
stream_generator=sse_stream_generator, fim_completion_func=atext_completion
37+
),
38+
)
3239
self._fim_normalizer = OpenRouterNormalizer()
3340

3441
@property

0 commit comments

Comments
 (0)