Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions cspell.json
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
"packagejson",
"prefault",
"preinstall",
"ptaas",
"remeda",
"traceparent",
"tracestate",
Expand Down
8 changes: 4 additions & 4 deletions examples/cozeloop-ai-node/src/practice/travel-plan.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { type ChatCompletionCreateParams } from 'openai/resources/chat';
import { OpenAI } from 'openai';
import { cozeLoopTracer, PromptHub, SpanKind } from '@cozeloop/ai';
import { cozeLoopTracer, PromptHub, type Span, SpanKind } from '@cozeloop/ai';

// initialize tracer globally
cozeLoopTracer.initialize({
Expand All @@ -23,7 +23,7 @@ async function callLLM(messages: ChatCompletionCreateParams['messages']) {

// wrap model as a span node with `cozeLoopTracer.traceable`
return await cozeLoopTracer.traceable(
async span => {
async (span: Span) => {
cozeLoopTracer.setInput(span, { messages });

const resp = await openai.chat.completions.create({
Expand Down Expand Up @@ -64,7 +64,7 @@ async function runTravelPlan(options: TravelPlanOptions) {
const messages = hub.formatPrompt(prompt, { ...options });

// invoke model
return callLLM(messages);
return callLLM(messages as ChatCompletionCreateParams['messages']);
}

async function run() {
Expand All @@ -77,7 +77,7 @@ async function run() {
};

const result = await cozeLoopTracer.traceable(
async span => {
async (span: Span) => {
cozeLoopTracer.setInput(span, options);
const { choices } = await runTravelPlan(options);

Expand Down
2 changes: 2 additions & 0 deletions examples/cozeloop-ai-node/src/prompt/index.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import { run as runMultiPart } from './with-multi-part';
import { run as runWithLabel } from './with-label';
import { run as runWithJinja } from './with-jinja';
import { run as runPTaaS } from './ptaas';
import { run as runBasic } from './hub';

export async function run() {
Expand All @@ -9,6 +10,7 @@ export async function run() {
runWithJinja(),
runMultiPart(),
runWithLabel(),
runPTaaS(),
]);

process.exit(0);
Expand Down
132 changes: 132 additions & 0 deletions examples/cozeloop-ai-node/src/prompt/ptaas.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,132 @@
import assert from 'node:assert';

import { PromptAsAService } from '@cozeloop/ai';

/**
 * Exercise `PromptAsAService` with a normal (non-Jinja) prompt template,
 * covering both the one-shot `invoke` and the streaming `stream` APIs.
 */
async function runWithNormal() {
  const model = new PromptAsAService({
    /** workspace id, use process.env.COZELOOP_WORKSPACE_ID when unprovided */
    // workspaceId: 'your_workspace_id',
    apiClient: {
      // baseURL: 'api_base_url',
      // token: 'your_api_token',
    },
    prompt: {
      prompt_key: 'CozeLoop_Travel_Master',
      version: '0.0.2',
    },
  });

  // Both calls send the identical request; build it in one place (a fresh
  // object per call, so invoke/stream never share a mutated payload).
  const makeRequest = () => ({
    messages: [{ role: 'user' as const, content: '帮我规划轻松旅行' }],
    variables: {
      departure: '北京',
      destination: '上海',
      people_num: 2,
      days_num: 1,
      travel_theme: '亲子',
    },
  });

  // 1. model.invoke — one-shot completion
  const reply = await model.invoke(makeRequest());

  assert(reply?.message);
  assert(reply.usage);
  assert.strictEqual(reply.finish_reason, 'stop');

  // 2. model.stream — incremental chunks; drain and sanity-check each one
  const replyStream = await model.stream(makeRequest());

  for await (const chunk of replyStream) {
    assert(chunk);
  }
}

// Invoke a prompt that was authored with a Jinja2 template and verify the
// service returns a complete, well-formed reply.
async function runWithJinja() {
  const service = new PromptAsAService({
    /** workspace id, falls back to process.env.COZELOOP_WORKSPACE_ID */
    // workspaceId: 'your_workspace_id',
    apiClient: {
      // baseURL: 'api_base_url',
      // token: 'your_api_token',
    },
    prompt: {
      version: '0.0.5',
      prompt_key: 'loop12',
    },
  });

  // Variables cover typical Jinja2 features: a scalar (title), a nested
  // object (user), a list (items) and a message placeholder (place).
  const result = await service.invoke({
    messages: [{ role: 'user', content: '总结模板内容' }],
    variables: {
      title: 'Title',
      user: {
        name: 'Loop',
        is_authenticated: false,
      },
      items: [{ name: 'fish' }],
      place: [{ role: 'assistant', content: '好的' }],
    },
  });

  assert(result?.message);
  assert(result.usage);
  assert.strictEqual(result.finish_reason, 'stop');
}

// Stream a prompt whose variables include multimodal (text + image) parts,
// draining the stream and checking that every chunk is truthy.
async function runWithMultiPart() {
  const service = new PromptAsAService({
    /** workspace id, falls back to process.env.COZELOOP_WORKSPACE_ID */
    // workspaceId: 'your_workspace_id',
    apiClient: {
      // baseURL: 'api_base_url',
      // token: 'your_api_token',
    },
    prompt: {
      version: '0.0.3',
      prompt_key: 'loop',
    },
  });

  const stream = await service.stream({
    messages: [{ role: 'user', content: 'respond in 50 words' }],
    variables: {
      var1: 'sports',
      placeholder1: { role: 'assistant', content: 'go on' },
      var2: 'how to play football',
      // multi-part variable: plain text followed by an image URL
      img1: [
        { type: 'text', text: 'text1' },
        {
          type: 'image_url',
          image_url: {
            url: 'https://tinypng.com/static/images/george-anim/large_george_x2.webp',
          },
        },
      ],
    },
  });

  for await (const chunk of stream) {
    assert(chunk);
  }
}

/**
 * Run every PTaaS example concurrently, then exit the process.
 *
 * NOTE(review): this module both self-executes (`run()` below) and exports
 * `run` for src/prompt/index.ts — importing it therefore starts the examples
 * immediately, and whichever `process.exit` fires first wins. Confirm this
 * mirrors the other example modules' intent.
 */
export async function run() {
  await Promise.all([
    runWithNormal(), // normal template
    runWithJinja(), // Jinja2 template
    runWithMultiPart(), // multi-part (multimodal) variables
  ]);

  // explicit success exit code, consistent with src/prompt/index.ts
  process.exit(0);
}

void run();
3 changes: 3 additions & 0 deletions packages/cozeloop-ai/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
# 🕗 ChangeLog - @cozeloop/ai

## 0.0.9
* Prompt as a Service (ptaas)

## 0.0.8
* PromptHub: get prompt with label

Expand Down
34 changes: 24 additions & 10 deletions packages/cozeloop-ai/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,30 +19,44 @@ pnpm install @cozeloop/ai

### 2. Basic Usage
```typescript
import { ApiClient, PromptHub } from '@cozeloop/ai';
import { ApiClient, PromptHub, PromptAsAService } from '@cozeloop/ai';

// 1. setup API client
// 1. Setup API client
const apiClient = new ApiClient({
baseURL: 'https://api.coze.cn',
token: 'your_access_token',
});

// 2. Using prompt hub to get prompt
const promptHub = new PromptHub({
// 2. Using `PromptHub` or `PromptAsAService`
const hub = new PromptHub({
// or set it as process.env.COZELOOP_WORKSPACE_ID,
workspaceId: 'your_workspace_id',
apiClient,
});
// hub.getPrompt(key, version);
// hub.formatPrompt(prompt);

const prompt = await promptHub.getPrompt(
'your_prompt_key',
'prompt_version (optional)',
);
const model = new PromptAsAService({
// or set it as process.env.COZELOOP_WORKSPACE_ID,
workspaceId: 'your_workspace_id',
// prompt to invoke as a service
prompt: {
prompt_key: 'your_prompt_key',
},
apiClient,
});
// model.invoke({
// messages: [{ role: 'user', content: 'hi' }],
// });
// model.stream({
// messages: [{ role: 'user', content: 'hi' }],
// });
```

## Key Features
- 🗄️ **Prompt Hub**: Develop, submit and publish prompts on [CozeLoop](https://loop.coze.cn), and access them it via `PromptHub`
- 🔐 **Authentication Methods**: PAT and JWT
- 🗂️ **Prompt Hub**: Develop, submit and publish prompts on [CozeLoop](https://loop.coze.cn), and access them via `PromptHub`
- 🛠️ **Prompt as a Service**: Develop, submit and publish prompts on [CozeLoop](https://loop.coze.cn), and invoke them as services
- 🔐 **Authentication Methods**: PAT, SAT and JWT
- ⚙️ **Configurable**: Timeout, headers, signal, debug options

## Authentication Options
Expand Down
28 changes: 21 additions & 7 deletions packages/cozeloop-ai/README.zh-CN.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,29 +19,43 @@ pnpm install @cozeloop/ai

### 2. 基础用法
```typescript
import { ApiClient, PromptHub } from '@cozeloop/ai';
import { ApiClient, PromptHub, PromptAsAService } from '@cozeloop/ai';

// 1. 设置 ApiClient
const apiClient = new ApiClient({
baseURL: 'https://api.coze.cn',
token: 'your_access_token',
});

// 2. 使用 PromptHub 获取 Prompt
// 2. 使用 `PromptHub` 或 `PromptAsAService`
const hub = new PromptHub({
// 或设置环境变量 process.env.COZELOOP_WORKSPACE_ID,
workspaceId: 'your_workspace_id',
apiClient,
});
// hub.getPrompt(key, version);
// hub.formatPrompt(prompt);

const prompt = await promptHub.getPrompt(
'your_prompt_key',
'prompt_version (optional)',
);
const model = new PromptAsAService({
// 或设置环境变量 process.env.COZELOOP_WORKSPACE_ID,
workspaceId: 'your_workspace_id',
// 要调用的 prompt
prompt: {
prompt_key: 'your_prompt_key',
},
apiClient,
});
// model.invoke({
// messages: [{ role: 'user', content: 'hi' }],
// });
// model.stream({
// messages: [{ role: 'user', content: 'hi' }],
// });
```

## 主要特性
- 🗄️ **Prompt Hub**: 在 [CozeLoop](https://loop.coze.cn) 平台开发、提交和发布 Prompt,使用 `PromptHub` 访问 Prompt。
- 🗂️ **Prompt Hub**: 在 [CozeLoop](https://loop.coze.cn) 平台开发、提交和发布 Prompt,使用 `PromptHub` 访问 Prompt。
- 🛠️ **Prompt as a Service**: 在 [CozeLoop](https://loop.coze.cn) 平台开发、提交和发布 Prompt,并作为服务调用。
- 🔐 **多种鉴权方式**: PAT、SAT 和 JWT
- ⚙️ **可配置**: 超时、请求头、信号、调试

Expand Down
6 changes: 6 additions & 0 deletions packages/cozeloop-ai/__tests__/__mock__/base-http.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,12 @@ import { fileToStreamResp, headersToJson, setupMockServer } from './utils';

export function setupBaseHttpMock() {
const mockServer = setupServer(
http.post(/\/stream-event-error/i, () =>
fileToStreamResp(join(__dirname, 'base-stream-event-error.txt')),
),
http.post(/\/stream-parse-error/i, () =>
fileToStreamResp(join(__dirname, 'base-stream-parse-error.txt')),
),
http.post(/\/stream/i, () =>
fileToStreamResp(join(__dirname, 'base-stream.txt')),
),
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
data: {"seq": 1}

event: error
data: 500 Bad Gateway

Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
data: {"seq"1: 1}

1 change: 1 addition & 0 deletions packages/cozeloop-ai/__tests__/__mock__/base-stream.txt
Original file line number Diff line number Diff line change
Expand Up @@ -3,3 +3,4 @@ data: {"seq": 1}
data: {"seq": 2}

data: {"seq": 3}

Loading
Loading