
Commit 493bfe5

fix: use model defaultTemperature over provider default in BaseOpenAiCompatibleProvider
1 parent 193be14 commit 493bfe5

File tree

2 files changed: +77 -8 lines changed


src/api/providers/__tests__/fireworks.spec.ts

Lines changed: 76 additions & 7 deletions
@@ -449,16 +449,85 @@ describe("FireworksHandler", () => {
 		)
 	})

-	it("should use default temperature of 0.5", () => {
-		const testModelId: FireworksModelId = "accounts/fireworks/models/kimi-k2-instruct"
+	it("should use provider default temperature of 0.5 for models without defaultTemperature", async () => {
+		const modelId: FireworksModelId = "accounts/fireworks/models/kimi-k2-instruct"
 		const handlerWithModel = new FireworksHandler({
-			apiModelId: testModelId,
+			apiModelId: modelId,
 			fireworksApiKey: "test-fireworks-api-key",
 		})
-		const model = handlerWithModel.getModel()
-		// The temperature is set in the constructor as defaultTemperature: 0.5
-		// This test verifies the handler is configured with the correct default temperature
-		expect(handlerWithModel).toBeDefined()
+
+		mockCreate.mockImplementationOnce(() => ({
+			[Symbol.asyncIterator]: () => ({
+				async next() {
+					return { done: true }
+				},
+			}),
+		}))
+
+		const messageGenerator = handlerWithModel.createMessage("system", [])
+		await messageGenerator.next()
+
+		expect(mockCreate).toHaveBeenCalledWith(
+			expect.objectContaining({
+				temperature: 0.5,
+			}),
+			undefined,
+		)
+	})
+
+	it("should use model defaultTemperature (1.0) over provider default (0.5) for kimi-k2-thinking", async () => {
+		const modelId: FireworksModelId = "accounts/fireworks/models/kimi-k2-thinking"
+		const handlerWithModel = new FireworksHandler({
+			apiModelId: modelId,
+			fireworksApiKey: "test-fireworks-api-key",
+		})
+
+		mockCreate.mockImplementationOnce(() => ({
+			[Symbol.asyncIterator]: () => ({
+				async next() {
+					return { done: true }
+				},
+			}),
+		}))
+
+		const messageGenerator = handlerWithModel.createMessage("system", [])
+		await messageGenerator.next()
+
+		// Model's defaultTemperature (1.0) should take precedence over provider's default (0.5)
+		expect(mockCreate).toHaveBeenCalledWith(
+			expect.objectContaining({
+				temperature: 1.0,
+			}),
+			undefined,
+		)
+	})
+
+	it("should use user-specified temperature over model and provider defaults", async () => {
+		const modelId: FireworksModelId = "accounts/fireworks/models/kimi-k2-thinking"
+		const handlerWithModel = new FireworksHandler({
+			apiModelId: modelId,
+			fireworksApiKey: "test-fireworks-api-key",
+			modelTemperature: 0.7,
+		})
+
+		mockCreate.mockImplementationOnce(() => ({
+			[Symbol.asyncIterator]: () => ({
+				async next() {
+					return { done: true }
+				},
+			}),
+		}))
+
+		const messageGenerator = handlerWithModel.createMessage("system", [])
+		await messageGenerator.next()
+
+		// User-specified temperature should take precedence over everything
+		expect(mockCreate).toHaveBeenCalledWith(
+			expect.objectContaining({
+				temperature: 0.7,
+			}),
+			undefined,
+		)
 	})

 	it("should handle empty response in completePrompt", async () => {

src/api/providers/base-openai-compatible-provider.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -84,7 +84,7 @@ export abstract class BaseOpenAiCompatibleProvider<ModelName extends string>
 			format: "openai",
 		}) ?? undefined

-		const temperature = this.options.modelTemperature ?? this.defaultTemperature
+		const temperature = this.options.modelTemperature ?? info.defaultTemperature ?? this.defaultTemperature

 		const params: OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming = {
 			model,
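
The fix itself is the single changed line above: a user-supplied modelTemperature wins, then the model's own defaultTemperature from its model info, and only then the provider-wide default passed to the base class. A minimal sketch of that precedence chain, using illustrative names rather than the repository's actual types:

// Hypothetical, simplified model-info shape; only defaultTemperature matters here.
interface ModelInfoSketch {
	defaultTemperature?: number
}

function resolveTemperature(
	userTemperature: number | null | undefined,
	info: ModelInfoSketch,
	providerDefault: number,
): number {
	// Same shape as the changed line: user setting ?? model default ?? provider default.
	return userTemperature ?? info.defaultTemperature ?? providerDefault
}

// Mirrors the three test cases added above:
console.log(resolveTemperature(undefined, {}, 0.5)) // -> 0.5 (provider default)
console.log(resolveTemperature(undefined, { defaultTemperature: 1.0 }, 0.5)) // -> 1.0 (model default wins)
console.log(resolveTemperature(0.7, { defaultTemperature: 1.0 }, 0.5)) // -> 0.7 (user setting wins)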
