Skip to content

Commit 50b4b3a

Browse files
feat(api): update via SDK Studio (#225)
1 parent 0782fb2 commit 50b4b3a

File tree

3 files changed

+33
-34
lines changed

3 files changed

+33
-34
lines changed

.stats.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,2 @@
11
configured_endpoints: 22
2-
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-34edf740524e434708905ba916368bd4b1b335aa95cc8c26883f25d3dfbdd221.yml
2+
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-9cff8ea13f14bd0899df69243fe78b4f88d4d0172263aa260af1ea66a7d0484e.yml

src/resources/prompts.ts

Lines changed: 24 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -47,10 +47,9 @@ export class Prompts extends APIResource {
4747
}
4848

4949
/**
50-
* Fetches the configured model parameters and messages rendered with the provided
51-
* variables mapped to the set LLM provider. This endpoint abstracts the need to
52-
* handle mapping between different providers, while still allowing direct calls to
53-
* the providers.
50+
* Fetches the model configuration parameters for a specified prompt, including
51+
* penalty settings, response format, and the model messages rendered with the
52+
* given variables mapped to the set LLM provider.
5453
*/
5554
getParameters(
5655
id: string,
@@ -539,9 +538,14 @@ export namespace PromptConfiguration {
539538
maxTokens: number | null;
540539

541540
/**
542-
* The name of the model for the provider.
541+
* Example: "gpt-3.5-turbo"
543542
*/
544-
name: string;
543+
modelName: string;
544+
545+
/**
546+
* The provider of the provided model.
547+
*/
548+
modelProvider: 'ANTHROPIC' | 'OPENAI';
545549

546550
parallelToolCalls: boolean;
547551

@@ -550,11 +554,6 @@ export namespace PromptConfiguration {
550554
*/
551555
presencePenalty: number;
552556

553-
/**
554-
* The LLM model provider.
555-
*/
556-
provider: 'ANTHROPIC' | 'OPENAI';
557-
558557
/**
559558
* Example: PromptResponseFormat.TEXT
560559
*/
@@ -717,9 +716,14 @@ export namespace PromptCreateParams {
717716
maxTokens: number | null;
718717

719718
/**
720-
* The name of the model for the provider.
719+
* Example: "gpt-3.5-turbo"
721720
*/
722-
name: string;
721+
modelName: string;
722+
723+
/**
724+
* The provider of the provided model.
725+
*/
726+
modelProvider: 'ANTHROPIC' | 'OPENAI';
723727

724728
parallelToolCalls: boolean;
725729

@@ -728,11 +732,6 @@ export namespace PromptCreateParams {
728732
*/
729733
presencePenalty: number;
730734

731-
/**
732-
* The LLM model provider.
733-
*/
734-
provider: 'ANTHROPIC' | 'OPENAI';
735-
736735
/**
737736
* Example: PromptResponseFormat.TEXT
738737
*/
@@ -867,9 +866,14 @@ export namespace PromptUpdateParams {
867866
maxTokens: number | null;
868867

869868
/**
870-
* The name of the model for the provider.
869+
* Example: "gpt-3.5-turbo"
871870
*/
872-
name: string;
871+
modelName: string;
872+
873+
/**
874+
* The provider of the provided model.
875+
*/
876+
modelProvider: 'ANTHROPIC' | 'OPENAI';
873877

874878
parallelToolCalls: boolean;
875879

@@ -878,11 +882,6 @@ export namespace PromptUpdateParams {
878882
*/
879883
presencePenalty: number;
880884

881-
/**
882-
* The LLM model provider.
883-
*/
884-
provider: 'ANTHROPIC' | 'OPENAI';
885-
886885
/**
887886
* Example: PromptResponseFormat.TEXT
888887
*/

tests/api-resources/prompts.test.ts

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -42,8 +42,8 @@ describe('resource prompts', () => {
4242
],
4343
name: 'name',
4444
parameters: {
45-
provider: 'ANTHROPIC',
46-
name: 'name',
45+
modelProvider: 'ANTHROPIC',
46+
modelName: 'modelName',
4747
responseFormat: 'JSON',
4848
temperature: 0,
4949
topP: 0,
@@ -100,8 +100,8 @@ describe('resource prompts', () => {
100100
],
101101
name: 'name',
102102
parameters: {
103-
provider: 'ANTHROPIC',
104-
name: 'name',
103+
modelProvider: 'ANTHROPIC',
104+
modelName: 'modelName',
105105
responseFormat: 'JSON',
106106
temperature: 0,
107107
topP: 0,
@@ -151,8 +151,8 @@ describe('resource prompts', () => {
151151
],
152152
name: 'name',
153153
parameters: {
154-
provider: 'ANTHROPIC',
155-
name: 'name',
154+
modelProvider: 'ANTHROPIC',
155+
modelName: 'modelName',
156156
responseFormat: 'JSON',
157157
temperature: 0,
158158
topP: 0,
@@ -209,8 +209,8 @@ describe('resource prompts', () => {
209209
],
210210
name: 'name',
211211
parameters: {
212-
provider: 'ANTHROPIC',
213-
name: 'name',
212+
modelProvider: 'ANTHROPIC',
213+
modelName: 'modelName',
214214
responseFormat: 'JSON',
215215
temperature: 0,
216216
topP: 0,

0 commit comments

Comments (0)