Skip to content

Commit

Permalink
[Security solution] Do not send model if connector is inference (#206515)
Browse files Browse the repository at this point in the history
  • Loading branch information
stephmilovic authored Jan 13, 2025
1 parent f7a373b commit 1d90b39
Show file tree
Hide file tree
Showing 2 changed files with 1 addition and 3 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -220,7 +220,6 @@ describe('ActionsClientChatOpenAI', () => {
subAction: 'unified_completion_async_iterator',
subActionParams: {
body: {
model: 'gpt-4o',
messages: [{ role: 'user', content: 'Do you know my name?' }],

n: defaultStreamingArgs.n,
Expand Down Expand Up @@ -264,7 +263,6 @@ describe('ActionsClientChatOpenAI', () => {
subActionParams: {
body: {
temperature: 0.2,
model: 'gpt-4o',
n: 99,
stop: ['a stop sequence'],
tools: [{ function: jest.fn(), type: 'function' }],
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -201,7 +201,7 @@ export class ActionsClientChatOpenAI extends ChatOpenAI {
// possible client model override
// security sends this from connectors, it is only missing from preconfigured connectors
// this should be undefined otherwise so the connector handles the model (stack_connector has access to preconfigured connector model values)
model: this.model,
...(llmType === 'inference' ? {} : { model: this.model }),
n: completionRequest.n,
stop: completionRequest.stop,
tools: completionRequest.tools,
Expand Down

0 comments on commit 1d90b39

Please sign in to comment.