
Commit 0782fb2

feat(api): OpenAPI spec update via Stainless API (#223)
1 parent: 95c7058

File tree: 3 files changed (+30, -30 lines)


.stats.yml

Lines changed: 1 addition & 1 deletion

@@ -1,2 +1,2 @@
 configured_endpoints: 22
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-e8bee356492945c6a488f721b7e79b4f8a55c1edabfed4e2817729017f1ae43d.yml
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-34edf740524e434708905ba916368bd4b1b335aa95cc8c26883f25d3dfbdd221.yml

src/resources/prompts.ts

Lines changed: 21 additions & 21 deletions

@@ -539,14 +539,9 @@ export namespace PromptConfiguration {
     maxTokens: number | null;

     /**
-     * Example: "gpt-3.5-turbo"
+     * The name of the model for the provider.
      */
-    modelName: string;
-
-    /**
-     * The provider of the provided model.
-     */
-    modelProvider: 'ANTHROPIC' | 'OPENAI';
+    name: string;

     parallelToolCalls: boolean;

@@ -555,6 +550,11 @@
      */
     presencePenalty: number;

+    /**
+     * The LLM model provider.
+     */
+    provider: 'ANTHROPIC' | 'OPENAI';
+
     /**
      * Example: PromptResponseFormat.TEXT
      */

@@ -717,14 +717,9 @@ export namespace PromptCreateParams {
     maxTokens: number | null;

     /**
-     * Example: "gpt-3.5-turbo"
+     * The name of the model for the provider.
      */
-    modelName: string;
-
-    /**
-     * The provider of the provided model.
-     */
-    modelProvider: 'ANTHROPIC' | 'OPENAI';
+    name: string;

     parallelToolCalls: boolean;

@@ -733,6 +728,11 @@
      */
     presencePenalty: number;

+    /**
+     * The LLM model provider.
+     */
+    provider: 'ANTHROPIC' | 'OPENAI';
+
     /**
      * Example: PromptResponseFormat.TEXT
      */

@@ -867,14 +867,9 @@ export namespace PromptUpdateParams {
     maxTokens: number | null;

     /**
-     * Example: "gpt-3.5-turbo"
-     */
-    modelName: string;
-
-    /**
-     * The provider of the provided model.
+     * The name of the model for the provider.
      */
-    modelProvider: 'ANTHROPIC' | 'OPENAI';
+    name: string;

     parallelToolCalls: boolean;

@@ -883,6 +878,11 @@
      */
     presencePenalty: number;

+    /**
+     * The LLM model provider.
+     */
+    provider: 'ANTHROPIC' | 'OPENAI';
+
     /**
      * Example: PromptResponseFormat.TEXT
      */
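The net effect of the spec update is a field rename on the model configuration object: `modelName` becomes `name` and `modelProvider` becomes `provider`, keeping the same `'ANTHROPIC' | 'OPENAI'` union. Below is a minimal, illustrative TypeScript sketch of how a caller could adapt an existing parameters object. The `LegacyModelParams` and `ModelParams` types and the `migrateModelParams` helper are not part of the generated SDK; only the renamed fields and the example values come from this diff and the updated tests below.

// Illustrative migration shim (not part of the generated SDK): maps the old
// field names onto the renamed ones introduced by this commit.
type LegacyModelParams = {
  modelProvider: 'ANTHROPIC' | 'OPENAI';
  modelName: string;
};

type ModelParams = {
  provider: 'ANTHROPIC' | 'OPENAI'; // was `modelProvider`
  name: string;                     // was `modelName`
};

function migrateModelParams(legacy: LegacyModelParams): ModelParams {
  return { provider: legacy.modelProvider, name: legacy.modelName };
}

// Mirrors the values used in the updated tests below.
const parameters = {
  ...migrateModelParams({ modelProvider: 'ANTHROPIC', modelName: 'name' }),
  responseFormat: 'JSON',
  temperature: 0,
  topP: 0,
};

console.log(parameters);
// -> { provider: 'ANTHROPIC', name: 'name', responseFormat: 'JSON', temperature: 0, topP: 0 }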

tests/api-resources/prompts.test.ts

Lines changed: 8 additions & 8 deletions

@@ -42,8 +42,8 @@ describe('resource prompts', () => {
       ],
       name: 'name',
       parameters: {
-        modelProvider: 'ANTHROPIC',
-        modelName: 'modelName',
+        provider: 'ANTHROPIC',
+        name: 'name',
         responseFormat: 'JSON',
         temperature: 0,
         topP: 0,

@@ -100,8 +100,8 @@ describe('resource prompts', () => {
       ],
       name: 'name',
       parameters: {
-        modelProvider: 'ANTHROPIC',
-        modelName: 'modelName',
+        provider: 'ANTHROPIC',
+        name: 'name',
         responseFormat: 'JSON',
         temperature: 0,
         topP: 0,

@@ -151,8 +151,8 @@ describe('resource prompts', () => {
       ],
       name: 'name',
       parameters: {
-        modelProvider: 'ANTHROPIC',
-        modelName: 'modelName',
+        provider: 'ANTHROPIC',
+        name: 'name',
         responseFormat: 'JSON',
         temperature: 0,
         topP: 0,

@@ -209,8 +209,8 @@ describe('resource prompts', () => {
       ],
       name: 'name',
       parameters: {
-        modelProvider: 'ANTHROPIC',
-        modelName: 'modelName',
+        provider: 'ANTHROPIC',
+        name: 'name',
         responseFormat: 'JSON',
         temperature: 0,
         topP: 0,
