
Commit b00cbc6

stainless-app[bot] authored and stainless-bot committed
feat(api): OpenAPI spec update via Stainless API (#228)
1 parent e20bc0a commit b00cbc6

File tree

3 files changed: +34 -33 lines changed


.stats.yml

Lines changed: 1 addition & 1 deletion
@@ -1,2 +1,2 @@
 configured_endpoints: 22
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-9cff8ea13f14bd0899df69243fe78b4f88d4d0172263aa260af1ea66a7d0484e.yml
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-d853690356bd7363560a181b7acd421d0fbc1b95800423a0382b2c248edaf87b.yml

src/resources/prompts.ts

Lines changed: 25 additions & 24 deletions
@@ -47,9 +47,10 @@ export class Prompts extends APIResource {
   }
 
   /**
-   * Fetches the model configuration parameters for a specified prompt, including
-   * penalty settings, response format, and the model messages rendered with the
-   * given variables mapped to the set LLM provider.
+   * Fetches the configured model parameters and messages rendered with the provided
+   * variables mapped to the set LLM provider. This endpoint abstracts the need to
+   * handle mapping between different providers, while still allowing direct calls to
+   * the providers.
    */
   getParameters(
     id: string,
@@ -538,14 +539,9 @@ export namespace PromptConfiguration {
     maxTokens: number | null;
 
     /**
-     * Example: "gpt-3.5-turbo"
+     * The name of the model for the provider.
      */
-    modelName: string;
-
-    /**
-     * The provider of the provided model.
-     */
-    modelProvider: 'ANTHROPIC' | 'OPENAI';
+    name: string;
 
     parallelToolCalls: boolean;
 
@@ -554,6 +550,11 @@ export namespace PromptConfiguration {
      */
     presencePenalty: number;
 
+    /**
+     * The LLM model provider.
+     */
+    provider: 'ANTHROPIC' | 'OPENAI';
+
     /**
      * Example: PromptResponseFormat.TEXT
      */
@@ -716,14 +717,9 @@ export namespace PromptCreateParams {
     maxTokens: number | null;
 
     /**
-     * Example: "gpt-3.5-turbo"
+     * The name of the model for the provider.
      */
-    modelName: string;
-
-    /**
-     * The provider of the provided model.
-     */
-    modelProvider: 'ANTHROPIC' | 'OPENAI';
+    name: string;
 
     parallelToolCalls: boolean;
 
@@ -732,6 +728,11 @@ export namespace PromptCreateParams {
      */
     presencePenalty: number;
 
+    /**
+     * The LLM model provider.
+     */
+    provider: 'ANTHROPIC' | 'OPENAI';
+
     /**
      * Example: PromptResponseFormat.TEXT
      */
@@ -866,14 +867,9 @@ export namespace PromptUpdateParams {
     maxTokens: number | null;
 
     /**
-     * Example: "gpt-3.5-turbo"
-     */
-    modelName: string;
-
-    /**
-     * The provider of the provided model.
+     * The name of the model for the provider.
      */
-    modelProvider: 'ANTHROPIC' | 'OPENAI';
+    name: string;
 
     parallelToolCalls: boolean;
 
@@ -882,6 +878,11 @@ export namespace PromptUpdateParams {
      */
     presencePenalty: number;
 
+    /**
+     * The LLM model provider.
+     */
+    provider: 'ANTHROPIC' | 'OPENAI';
+
     /**
      * Example: PromptResponseFormat.TEXT
      */
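To make the rename concrete, here is a minimal TypeScript sketch of a parameters object under the new field names. The field names and types are taken from the diff above; the local `ModelParameters` interface, the `responseFormat` union, and the concrete values are illustrative assumptions rather than the SDK's generated types (those live in the `PromptConfiguration`, `PromptCreateParams`, and `PromptUpdateParams` namespaces shown here).

```ts
// Sketch only: a local stand-in for the generated Parameters types, not the SDK's own.
// Before this commit the first two fields were `modelProvider` and `modelName`.
interface ModelParameters {
  provider: 'ANTHROPIC' | 'OPENAI'; // was: modelProvider
  name: string; // was: modelName — the provider-specific model id
  maxTokens: number | null;
  parallelToolCalls: boolean;
  presencePenalty: number;
  responseFormat: 'TEXT' | 'JSON'; // assumed union, inferred from the examples in the diff
  temperature: number;
  topP: number;
}

const parameters: ModelParameters = {
  provider: 'ANTHROPIC',
  name: 'claude-3-haiku', // hypothetical model id
  maxTokens: null,
  parallelToolCalls: true,
  presencePenalty: 0,
  responseFormat: 'JSON',
  temperature: 0,
  topP: 0,
};
```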

tests/api-resources/prompts.test.ts

Lines changed: 8 additions & 8 deletions
@@ -42,8 +42,8 @@ describe('resource prompts', () => {
       ],
       name: 'name',
       parameters: {
-        modelProvider: 'ANTHROPIC',
-        modelName: 'modelName',
+        provider: 'ANTHROPIC',
+        name: 'name',
         responseFormat: 'JSON',
         temperature: 0,
         topP: 0,
@@ -100,8 +100,8 @@ describe('resource prompts', () => {
       ],
       name: 'name',
       parameters: {
-        modelProvider: 'ANTHROPIC',
-        modelName: 'modelName',
+        provider: 'ANTHROPIC',
+        name: 'name',
         responseFormat: 'JSON',
         temperature: 0,
         topP: 0,
@@ -151,8 +151,8 @@ describe('resource prompts', () => {
       ],
       name: 'name',
       parameters: {
-        modelProvider: 'ANTHROPIC',
-        modelName: 'modelName',
+        provider: 'ANTHROPIC',
+        name: 'name',
         responseFormat: 'JSON',
         temperature: 0,
         topP: 0,
@@ -209,8 +209,8 @@ describe('resource prompts', () => {
       ],
       name: 'name',
       parameters: {
-        modelProvider: 'ANTHROPIC',
-        modelName: 'modelName',
+        provider: 'ANTHROPIC',
+        name: 'name',
         responseFormat: 'JSON',
         temperature: 0,
         topP: 0,
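For existing callers, the change inside `parameters` is a straight rename, as the fixtures above show. Below is a hedged end-to-end sketch, assuming the package is published as `@prompt-foundry/typescript-sdk`, that the client exposes the resource as `client.prompts`, and that `getParameters` can be called with just a prompt id; none of those details are shown in this diff, which only confirms the `Prompts.getParameters(id, ...)` method and the renamed `provider`/`name` fields.

```ts
// Assumptions: package name, client constructor options, and response handling.
import PromptFoundry from '@prompt-foundry/typescript-sdk';

async function main(): Promise<void> {
  const client = new PromptFoundry({ apiKey: process.env.PROMPT_FOUNDRY_API_KEY });

  // Per the updated doc comment, this returns the configured model parameters and
  // messages rendered with the provided variables, mapped to the prompt's provider.
  // The truncated signature in the diff suggests it may also accept render options.
  const rendered = await client.prompts.getParameters('my-prompt-id'); // hypothetical id

  console.log(rendered);
}

main().catch(console.error);
```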
