From e05bf25c5a63036b53a13f1526d50b658b353803 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Thu, 31 Oct 2024 23:33:01 +0000 Subject: [PATCH 01/31] WIP realtime streams --- .../realtime.v1.streams.$runId.$streamId.ts | 47 ++ .../app/routes/realtime.v1.streams.test.ts | 43 ++ .../app/services/httpAsyncStorage.server.ts | 1 + .../app/services/realtimeStreams.server.ts | 167 +++++ .../services/realtimeStreamsGlobal.server.ts | 19 + apps/webapp/server.ts | 64 +- .../src/entryPoints/deploy-run-worker.ts | 13 +- .../cli-v3/src/entryPoints/dev-run-worker.ts | 13 +- packages/core/src/v3/apiClient/runStream.ts | 209 +++++- packages/core/src/v3/index.ts | 1 + packages/core/src/v3/runMetadata/index.ts | 8 + packages/core/src/v3/runMetadata/manager.ts | 66 ++ .../core/src/v3/runMetadata/metadataStream.ts | 86 +++ .../core/src/v3/runMetadata/noopManager.ts | 3 + packages/core/src/v3/runMetadata/types.ts | 1 + .../core/src/v3/runtime/devRuntimeManager.ts | 3 +- packages/core/src/v3/utils/getEnv.ts | 6 +- packages/core/src/v3/utils/globals.ts | 2 + packages/core/src/v3/wait-until-api.ts | 5 + packages/core/src/v3/waitUntil/index.ts | 54 ++ packages/core/src/v3/waitUntil/manager.ts | 34 + packages/core/src/v3/waitUntil/types.ts | 10 + packages/core/src/v3/workers/index.ts | 1 + packages/core/src/v3/workers/taskExecutor.ts | 21 +- packages/core/test/runStream.test.ts | 610 ++++++++++++++++++ packages/trigger-sdk/src/v3/index.ts | 2 + packages/trigger-sdk/src/v3/metadata.ts | 9 + packages/trigger-sdk/src/v3/waitUntil.ts | 13 + pnpm-lock.yaml | 24 + references/nextjs-realtime/package.json | 1 + references/nextjs-realtime/src/trigger/ai.ts | 75 +++ references/nextjs-realtime/trigger.config.ts | 2 +- stream_fetch_test.js | 125 ++++ stream_test.js | 41 ++ stream_test.sh | 13 + 35 files changed, 1751 insertions(+), 41 deletions(-) create mode 100644 apps/webapp/app/routes/realtime.v1.streams.$runId.$streamId.ts create mode 100644 apps/webapp/app/routes/realtime.v1.streams.test.ts create mode 100644 apps/webapp/app/services/realtimeStreams.server.ts create mode 100644 apps/webapp/app/services/realtimeStreamsGlobal.server.ts create mode 100644 packages/core/src/v3/runMetadata/metadataStream.ts create mode 100644 packages/core/src/v3/wait-until-api.ts create mode 100644 packages/core/src/v3/waitUntil/index.ts create mode 100644 packages/core/src/v3/waitUntil/manager.ts create mode 100644 packages/core/src/v3/waitUntil/types.ts create mode 100644 packages/core/test/runStream.test.ts create mode 100644 packages/trigger-sdk/src/v3/waitUntil.ts create mode 100644 references/nextjs-realtime/src/trigger/ai.ts create mode 100644 stream_fetch_test.js create mode 100644 stream_test.js create mode 100755 stream_test.sh diff --git a/apps/webapp/app/routes/realtime.v1.streams.$runId.$streamId.ts b/apps/webapp/app/routes/realtime.v1.streams.$runId.$streamId.ts new file mode 100644 index 0000000000..ffbdad5850 --- /dev/null +++ b/apps/webapp/app/routes/realtime.v1.streams.$runId.$streamId.ts @@ -0,0 +1,47 @@ +import { ActionFunctionArgs } from "@remix-run/server-runtime"; +import { z } from "zod"; +import { $replica } from "~/db.server"; +import { realtimeStreams } from "~/services/realtimeStreamsGlobal.server"; +import { createLoaderApiRoute } from "~/services/routeBuiilders/apiBuilder.server"; + +const ParamsSchema = z.object({ + runId: z.string(), + streamId: z.string(), +}); + +export async function action({ request, params }: ActionFunctionArgs) { + const $params = ParamsSchema.parse(params); + + if (!request.body) { + 
return new Response("No body provided", { status: 400 }); + } + + return realtimeStreams.ingestData(request.body, $params.runId, $params.streamId); +} + +export const loader = createLoaderApiRoute( + { + params: ParamsSchema, + allowJWT: true, + corsStrategy: "all", + authorization: { + action: "read", + resource: (params) => ({ runs: params.runId }), + superScopes: ["read:runs", "read:all", "admin"], + }, + }, + async ({ params, authentication, request }) => { + const run = await $replica.taskRun.findFirst({ + where: { + friendlyId: params.runId, + runtimeEnvironmentId: authentication.environment.id, + }, + }); + + if (!run) { + return new Response("Run not found", { status: 404 }); + } + + return realtimeStreams.streamResponse(run.friendlyId, params.streamId, request.signal); + } +); diff --git a/apps/webapp/app/routes/realtime.v1.streams.test.ts b/apps/webapp/app/routes/realtime.v1.streams.test.ts new file mode 100644 index 0000000000..6035b78e7d --- /dev/null +++ b/apps/webapp/app/routes/realtime.v1.streams.test.ts @@ -0,0 +1,43 @@ +import { ActionFunctionArgs } from "@remix-run/server-runtime"; + +export async function action({ request }: ActionFunctionArgs) { + if (!request.body) { + return new Response("No body provided", { status: 400 }); + } + + const reader = request.body.getReader(); + const decoder = new TextDecoder(); + let buffer = ""; + + try { + while (true) { + const { done, value } = await reader.read(); + + if (done) { + if (buffer) { + const data = JSON.parse(buffer); + console.log(`${new Date().toISOString()} Received data at end:`, data); + // You can process the data as needed + } + break; + } + + buffer += decoder.decode(value, { stream: true }); + const lines = buffer.split("\n"); + buffer = lines.pop() || ""; + + for (const line of lines) { + if (line.trim()) { + const data = JSON.parse(line); + console.log(`${new Date().toISOString()} Received data:`, data); + // You can process each data chunk as needed + } + } + } + + return new Response(null, { status: 200 }); + } catch (error) { + console.error("Error processing stream:", error); + return new Response(null, { status: 500 }); + } +} diff --git a/apps/webapp/app/services/httpAsyncStorage.server.ts b/apps/webapp/app/services/httpAsyncStorage.server.ts index 98fd0f48e8..7b709e4bf1 100644 --- a/apps/webapp/app/services/httpAsyncStorage.server.ts +++ b/apps/webapp/app/services/httpAsyncStorage.server.ts @@ -4,6 +4,7 @@ export type HttpLocalStorage = { requestId: string; path: string; host: string; + method: string; }; const httpLocalStorage = new AsyncLocalStorage(); diff --git a/apps/webapp/app/services/realtimeStreams.server.ts b/apps/webapp/app/services/realtimeStreams.server.ts new file mode 100644 index 0000000000..6ff6bbfbd7 --- /dev/null +++ b/apps/webapp/app/services/realtimeStreams.server.ts @@ -0,0 +1,167 @@ +import Redis, { RedisOptions } from "ioredis"; +import { logger } from "./logger.server"; + +export type RealtimeStreamsOptions = { + redis: RedisOptions | undefined; +}; + +export class RealtimeStreams { + constructor(private options: RealtimeStreamsOptions) {} + + async streamResponse(runId: string, streamId: string, signal: AbortSignal): Promise { + const redis = new Redis(this.options.redis ?? 
{}); + const streamKey = `stream:${runId}:${streamId}`; + + const stream = new TransformStream({ + transform(chunk: string, controller) { + try { + const data = JSON.parse(chunk); + + if (typeof data === "object" && data !== null && "__end" in data && data.__end === true) { + controller.terminate(); + return; + } + controller.enqueue(`data: ${chunk}\n\n`); + } catch (error) { + console.error("Invalid JSON in stream:", error); + } + }, + }); + + const response = new Response(stream.readable, { + headers: { + "Content-Type": "text/event-stream", + "Cache-Control": "no-cache", + Connection: "keep-alive", + }, + }); + + let isCleanedUp = false; + + async function cleanup() { + if (isCleanedUp) return; + isCleanedUp = true; + await redis.quit(); + const writer = stream.writable.getWriter(); + if (writer) await writer.close().catch(() => {}); // Ensure close doesn't error if already closed + } + + signal.addEventListener("abort", cleanup); + + (async () => { + let lastId = "0"; + let retryCount = 0; + const maxRetries = 3; + + try { + while (!signal.aborted) { + try { + const messages = await redis.xread( + "COUNT", + 100, + "BLOCK", + 5000, + "STREAMS", + streamKey, + lastId + ); + + retryCount = 0; + + if (messages && messages.length > 0) { + const [_key, entries] = messages[0]; + + for (const [id, fields] of entries) { + lastId = id; + + if (fields && fields.length >= 2 && !stream.writable.locked) { + const writer = stream.writable.getWriter(); + try { + await writer.write(fields[1]); + } finally { + writer.releaseLock(); + } + } + } + } + } catch (error) { + console.error("Error reading from Redis stream:", error); + retryCount++; + if (retryCount >= maxRetries) throw error; + await new Promise((resolve) => setTimeout(resolve, 1000 * retryCount)); + } + } + } catch (error) { + console.error("Fatal error in stream processing:", error); + } finally { + await cleanup(); + } + })(); + + return response; + } + + async ingestData( + stream: ReadableStream, + runId: string, + streamId: string + ): Promise { + const redis = new Redis(this.options.redis ?? 
{}); + + const streamKey = `stream:${runId}:${streamId}`; + + async function cleanup(stream?: TransformStream) { + try { + await redis.quit(); + if (stream) { + const writer = stream.writable.getWriter(); + await writer.close(); // Catch in case the stream is already closed + } + } catch (error) { + logger.error("[RealtimeStreams][ingestData] Error in cleanup:", { error }); + } + } + + try { + const reader = stream.getReader(); + const decoder = new TextDecoder(); + let buffer = ""; + + while (true) { + const { done, value } = await reader.read(); + + logger.debug("[RealtimeStreams][ingestData] Reading data", { streamKey, done }); + + if (done) { + if (buffer) { + const data = JSON.parse(buffer); + await redis.xadd(streamKey, "*", "data", JSON.stringify(data)); + } + break; + } + + buffer += decoder.decode(value, { stream: true }); + const lines = buffer.split("\n"); + buffer = lines.pop() || ""; + + for (const line of lines) { + if (line.trim()) { + const data = JSON.parse(line); + + logger.debug("[RealtimeStreams][ingestData] Ingesting data", { streamKey }); + + await redis.xadd(streamKey, "*", "data", JSON.stringify(data)); + } + } + } + + await redis.xadd(streamKey, "*", "data", JSON.stringify({ __end: true })); + return new Response(null, { status: 200 }); + } catch (error) { + console.error("Error in ingestData:", error); + return new Response(null, { status: 500 }); + } finally { + await cleanup(); + } + } +} diff --git a/apps/webapp/app/services/realtimeStreamsGlobal.server.ts b/apps/webapp/app/services/realtimeStreamsGlobal.server.ts new file mode 100644 index 0000000000..a4f20ac060 --- /dev/null +++ b/apps/webapp/app/services/realtimeStreamsGlobal.server.ts @@ -0,0 +1,19 @@ +import { env } from "~/env.server"; +import { singleton } from "~/utils/singleton"; +import { RealtimeStreams } from "./realtimeStreams.server"; + +function initializeRealtimeStreams() { + return new RealtimeStreams({ + redis: { + port: env.REDIS_PORT, + host: env.REDIS_HOST, + username: env.REDIS_USERNAME, + password: env.REDIS_PASSWORD, + enableAutoPipelining: true, + ...(env.REDIS_TLS_DISABLED === "true" ? 
{} : { tls: {} }), + keyPrefix: "tr:realtime:streams:", + }, + }); +} + +export const realtimeStreams = singleton("realtimeStreams", initializeRealtimeStreams); diff --git a/apps/webapp/server.ts b/apps/webapp/server.ts index 520b138d5e..b762aa4740 100644 --- a/apps/webapp/server.ts +++ b/apps/webapp/server.ts @@ -14,9 +14,9 @@ import { RegistryProxy } from "~/v3/registryProxy.server"; const app = express(); -if (process.env.DISABLE_COMPRESSION !== "1") { - app.use(compression()); -} +// if (process.env.DISABLE_COMPRESSION !== "1") { +// app.use(compression()); +// } // http://expressjs.com/en/advanced/best-practice-security.html#at-a-minimum-disable-x-powered-by-header app.disable("x-powered-by"); @@ -73,15 +73,63 @@ if (process.env.HTTP_SERVER_DISABLED !== "true") { next(); }); - app.use((req, res, next) => { - // Generate a unique request ID for each request - const requestId = nanoid(); + app.post("/realtime/v1/streams/express/test", async (req, res) => { + // Ensure the request is a readable stream + const { method, headers } = req; + console.log("Inside /realtime/v1/streams/express/test"); + + if (method !== "POST") { + return res.status(405).send("Method Not Allowed"); + } - runWithHttpContext({ requestId, path: req.url, host: req.hostname }, next); + // Set encoding to UTF-8 to read string data + req.setEncoding("utf8"); + + let buffer = ""; + + try { + req.on("data", (chunk) => { + buffer += chunk; + const lines = buffer.split("\n"); + buffer = lines.pop() || ""; + + for (const line of lines) { + if (line.trim()) { + const data = JSON.parse(line); + console.log(`${new Date().toISOString()} Received data:`, data); + // You can process each data chunk as needed + } + } + }); + + req.on("end", () => { + if (buffer) { + const data = JSON.parse(buffer); + console.log(`${new Date().toISOString()} Received data at end:`, data); + // You can process the remaining data as needed + } + res.status(200).send(); // Send a success response + }); + + req.on("error", (error) => { + console.error("Error processing stream:", error); + res.status(500).send("Internal Server Error"); + }); + } catch (error) { + console.error("Error processing stream:", error); + res.status(500).send("Internal Server Error"); + } }); + // app.use((req, res, next) => { + // // Generate a unique request ID for each request + // const requestId = nanoid(); + + // runWithHttpContext({ requestId, path: req.url, host: req.hostname, method: req.method }, next); + // }); + if (process.env.DASHBOARD_AND_API_DISABLED !== "true") { - app.use(apiRateLimiter); + // app.use(apiRateLimiter); app.all( "*", diff --git a/packages/cli-v3/src/entryPoints/deploy-run-worker.ts b/packages/cli-v3/src/entryPoints/deploy-run-worker.ts index 96038f095e..2a35f6c0eb 100644 --- a/packages/cli-v3/src/entryPoints/deploy-run-worker.ts +++ b/packages/cli-v3/src/entryPoints/deploy-run-worker.ts @@ -15,6 +15,7 @@ import { ExecutorToWorkerMessageCatalog, timeout, runMetadata, + waitUntil, } from "@trigger.dev/core/v3"; import { TriggerTracer } from "@trigger.dev/core/v3/tracer"; import { ProdRuntimeManager } from "@trigger.dev/core/v3/prod"; @@ -34,6 +35,7 @@ import { usage, UsageTimeoutManager, StandardMetadataManager, + StandardWaitUntilManager, } from "@trigger.dev/core/v3/workers"; import { ZodIpcConnection } from "@trigger.dev/core/v3/zodIpc"; import { readFile } from "node:fs/promises"; @@ -100,8 +102,17 @@ timeout.setGlobalManager(new UsageTimeoutManager(devUsageManager)); taskCatalog.setGlobalTaskCatalog(new StandardTaskCatalog()); const 
durableClock = new DurableClock(); clock.setGlobalClock(durableClock); -const runMetadataManager = new StandardMetadataManager(); +const runMetadataManager = new StandardMetadataManager( + getEnvVar("TRIGGER_STREAM_URL", getEnvVar("TRIGGER_API_URL")) ?? "https://api.trigger.dev" +); runMetadata.setGlobalManager(runMetadataManager); +const waitUntilManager = new StandardWaitUntilManager(); +waitUntil.setGlobalManager(waitUntilManager); +// Wait for all streams to finish before completing the run +waitUntil.register({ + requiresResolving: () => runMetadataManager.hasActiveStreams(), + promise: () => runMetadataManager.waitForAllStreams(), +}); const triggerLogLevel = getEnvVar("TRIGGER_LOG_LEVEL"); diff --git a/packages/cli-v3/src/entryPoints/dev-run-worker.ts b/packages/cli-v3/src/entryPoints/dev-run-worker.ts index c18dee8cb1..b3a9d81486 100644 --- a/packages/cli-v3/src/entryPoints/dev-run-worker.ts +++ b/packages/cli-v3/src/entryPoints/dev-run-worker.ts @@ -15,6 +15,7 @@ import { ExecutorToWorkerMessageCatalog, timeout, runMetadata, + waitUntil, } from "@trigger.dev/core/v3"; import { TriggerTracer } from "@trigger.dev/core/v3/tracer"; import { DevRuntimeManager } from "@trigger.dev/core/v3/dev"; @@ -33,6 +34,7 @@ import { usage, getNumberEnvVar, StandardMetadataManager, + StandardWaitUntilManager, } from "@trigger.dev/core/v3/workers"; import { ZodIpcConnection } from "@trigger.dev/core/v3/zodIpc"; import { readFile } from "node:fs/promises"; @@ -82,8 +84,17 @@ usage.setGlobalUsageManager(devUsageManager); const devRuntimeManager = new DevRuntimeManager(); runtime.setGlobalRuntimeManager(devRuntimeManager); timeout.setGlobalManager(new UsageTimeoutManager(devUsageManager)); -const runMetadataManager = new StandardMetadataManager(); +const runMetadataManager = new StandardMetadataManager( + getEnvVar("TRIGGER_STREAM_URL", getEnvVar("TRIGGER_API_URL")) ?? 
"https://api.trigger.dev" +); runMetadata.setGlobalManager(runMetadataManager); +const waitUntilManager = new StandardWaitUntilManager(); +waitUntil.setGlobalManager(waitUntilManager); +// Wait for all streams to finish before completing the run +waitUntil.register({ + requiresResolving: () => runMetadataManager.hasActiveStreams(), + promise: () => runMetadataManager.waitForAllStreams(), +}); const triggerLogLevel = getEnvVar("TRIGGER_LOG_LEVEL"); diff --git a/packages/core/src/v3/apiClient/runStream.ts b/packages/core/src/v3/apiClient/runStream.ts index 64de6eb153..2ecbca9181 100644 --- a/packages/core/src/v3/apiClient/runStream.ts +++ b/packages/core/src/v3/apiClient/runStream.ts @@ -2,6 +2,7 @@ import { DeserializedJson } from "../../schemas/json.js"; import { RunStatus, SubscribeRunRawShape } from "../schemas/api.js"; import { SerializedError } from "../schemas/common.js"; import { AnyRunTypes, AnyTask, InferRunTypes } from "../types/tasks.js"; +import { getEnvVar } from "../utils/getEnv.js"; import { conditionallyImportAndParsePacket, IOPacket, @@ -48,49 +49,167 @@ export type RunShapeStreamOptions = { headers?: Record; fetchClient?: typeof fetch; closeOnComplete?: boolean; + signal?: AbortSignal; }; +export type StreamPartResult> = { + [K in keyof TStreams]: { + type: K; + chunk: TStreams[K]; + run: TRun; + }; +}[keyof TStreams]; + +export type RunWithStreamsResult> = + | { + type: "run"; + run: TRun; + } + | StreamPartResult; + export function runShapeStream( url: string, options?: RunShapeStreamOptions ): RunSubscription { - return new RunSubscription(url, options); + const $options: RunSubscriptionOptions = { + provider: { + async onShape(callback) { + return zodShapeStream(SubscribeRunRawShape, url, callback, options); + }, + }, + streamFactory: new SSEStreamSubscriptionFactory( + getEnvVar("TRIGGER_STREAM_URL", getEnvVar("TRIGGER_API_URL")) ?? 
"https://api.trigger.dev", + { + headers: options?.headers, + signal: options?.signal, + } + ), + }; + + return new RunSubscription($options); +} + +// First, define interfaces for the stream handling +export interface StreamSubscription { + subscribe(onChunk: (chunk: unknown) => Promise): Promise<() => void>; } +export interface StreamSubscriptionFactory { + createSubscription(runId: string, streamKey: string): StreamSubscription; +} + +// Real implementation for production +export class SSEStreamSubscription implements StreamSubscription { + constructor( + private url: string, + private options: { headers?: Record; signal?: AbortSignal } + ) {} + + async subscribe(onChunk: (chunk: unknown) => Promise): Promise<() => void> { + const response = await fetch(this.url, { + headers: { + Accept: "text/event-stream", + ...this.options.headers, + }, + signal: this.options.signal, + }); + + if (!response.body) { + throw new Error("No response body"); + } + + const reader = response.body.getReader(); + const decoder = new TextDecoder(); + let buffer = ""; + + (async () => { + try { + while (true) { + const { done, value } = await reader.read(); + + if (done) break; + + buffer += decoder.decode(value, { stream: true }); + + const lines = buffer.split("\n"); + buffer = lines.pop() || ""; + + for (const line of lines) { + if (line.trim() && !line.startsWith(":")) { + try { + // Strip the "data: " prefix before parsing + const data = line.replace(/^data: /, ""); + const chunk = JSON.parse(data); + await onChunk(chunk); + } catch (e) { + console.error("Error parsing stream chunk:", e); + console.error("Raw line:", line); + } + } + } + } + } catch (error) { + if (error instanceof Error && error.name === "AbortError") { + return; + } + console.error("Error in stream subscription:", error); + } + })(); + + return () => reader.cancel(); + } +} + +export class SSEStreamSubscriptionFactory implements StreamSubscriptionFactory { + constructor( + private baseUrl: string, + private options: { headers?: Record; signal?: AbortSignal } + ) {} + + createSubscription(runId: string, streamKey: string): StreamSubscription { + const url = `${this.baseUrl}/realtime/v1/streams/${runId}/${streamKey}`; + return new SSEStreamSubscription(url, this.options); + } +} + +export interface RunShapeProvider { + onShape(callback: (shape: SubscribeRunRawShape) => Promise): Promise<() => void>; +} + +export type RunSubscriptionOptions = RunShapeStreamOptions & { + provider: RunShapeProvider; + streamFactory: StreamSubscriptionFactory; +}; + export class RunSubscription { private abortController: AbortController; private unsubscribeShape?: () => void; private stream: AsyncIterableStream>; private packetCache = new Map(); + private _closeOnComplete: boolean; + private _isRunComplete = false; - constructor( - private url: string, - private options?: RunShapeStreamOptions - ) { + constructor(private options: RunSubscriptionOptions) { this.abortController = new AbortController(); + this._closeOnComplete = + typeof options.closeOnComplete === "undefined" ? 
true : options.closeOnComplete; const source = new ReadableStream({ start: async (controller) => { - this.unsubscribeShape = await zodShapeStream( - SubscribeRunRawShape, - this.url, - async (shape) => { - controller.enqueue(shape); - if ( - this.options?.closeOnComplete && - shape.completedAt && - !this.abortController.signal.aborted - ) { - controller.close(); - this.abortController.abort(); - } - }, - { - signal: this.abortController.signal, - fetchClient: this.options?.fetchClient, - headers: this.options?.headers, + this.unsubscribeShape = await this.options.provider.onShape(async (shape) => { + controller.enqueue(shape); + + this._isRunComplete = !!shape.completedAt; + + if ( + this._closeOnComplete && + this._isRunComplete && + !this.abortController.signal.aborted + ) { + controller.close(); + this.abortController.abort(); } - ); + }); }, cancel: () => { this.unsubscribe(); @@ -121,6 +240,48 @@ export class RunSubscription { return this.stream.getReader(); } + withStreams>(): AsyncIterableStream< + RunWithStreamsResult, TStreams> + > { + // Keep track of which streams we've already subscribed to + const activeStreams = new Set(); + + return createAsyncIterableStream(this.stream, { + transform: async (run, controller) => { + controller.enqueue({ + type: "run", + run, + }); + + // Check for stream metadata + if (run.metadata) { + for (const [key] of Object.entries(run.metadata)) { + if (key.startsWith("$$stream.")) { + const streamKey = key.replace("$$stream.", "") as keyof TStreams; + + if (!activeStreams.has(key)) { + activeStreams.add(key); + + const subscription = this.options.streamFactory.createSubscription( + run.id, + streamKey.toString() + ); + + await subscription.subscribe(async (chunk) => { + controller.enqueue({ + type: streamKey, + chunk: chunk as TStreams[typeof streamKey], + run, + } as StreamPartResult, TStreams>); + }); + } + } + } + } + }, + }); + } + private async transformRunShape(row: SubscribeRunRawShape): Promise> { const payloadPacket = row.payloadType ? ({ data: row.payload ?? 
undefined, dataType: row.payloadType } satisfies IOPacket) diff --git a/packages/core/src/v3/index.ts b/packages/core/src/v3/index.ts index cb94f42c84..17cba85590 100644 --- a/packages/core/src/v3/index.ts +++ b/packages/core/src/v3/index.ts @@ -12,6 +12,7 @@ export * from "./task-context-api.js"; export * from "./apiClientManager-api.js"; export * from "./usage-api.js"; export * from "./run-metadata-api.js"; +export * from "./wait-until-api.js"; export * from "./timeout-api.js"; export * from "./schemas/index.js"; export { SemanticInternalAttributes } from "./semanticInternalAttributes.js"; diff --git a/packages/core/src/v3/runMetadata/index.ts b/packages/core/src/v3/runMetadata/index.ts index f8590bfafc..df22321dd1 100644 --- a/packages/core/src/v3/runMetadata/index.ts +++ b/packages/core/src/v3/runMetadata/index.ts @@ -53,6 +53,14 @@ export class RunMetadataAPI implements RunMetadataManager { return this.#getManager().update(metadata); } + public stream( + key: string, + value: AsyncIterable, + signal?: AbortSignal + ): Promise> { + return this.#getManager().stream(key, value, signal); + } + flush(requestOptions?: ApiRequestOptions): Promise { return this.#getManager().flush(requestOptions); } diff --git a/packages/core/src/v3/runMetadata/manager.ts b/packages/core/src/v3/runMetadata/manager.ts index ed5b59ad7f..1a8e51a7af 100644 --- a/packages/core/src/v3/runMetadata/manager.ts +++ b/packages/core/src/v3/runMetadata/manager.ts @@ -5,11 +5,16 @@ import { apiClientManager } from "../apiClientManager-api.js"; import { taskContext } from "../task-context-api.js"; import { ApiRequestOptions } from "../zodfetch.js"; import { RunMetadataManager } from "./types.js"; +import { MetadataStream } from "./metadataStream.js"; export class StandardMetadataManager implements RunMetadataManager { private flushTimeoutId: NodeJS.Timeout | null = null; private hasChanges: boolean = false; private store: Record | undefined; + // Add a Map to track active streams + private activeStreams = new Map>(); + + constructor(private streamsBaseUrl: string) {} public enterWithMetadata(metadata: Record): void { this.store = metadata ?? 
{}; @@ -86,6 +91,67 @@ export class StandardMetadataManager implements RunMetadataManager { this.store = metadata; } + public async stream( + key: string, + value: AsyncIterable, + signal?: AbortSignal + ): Promise> { + const runId = taskContext.ctx?.run.id; + + if (!runId) { + return value; + } + + // Add the key to the special stream metadata object + this.setKey(`$$stream.${key}`, key); + + await this.flush(); + + const streamInstance = new MetadataStream({ + key, + runId, + iterator: value[Symbol.asyncIterator](), + baseUrl: this.streamsBaseUrl, + signal, + }); + + this.activeStreams.set(key, streamInstance); + + // Clean up when stream completes + streamInstance.wait().finally(() => this.activeStreams.delete(key)); + + return streamInstance; + } + + public hasActiveStreams(): boolean { + return this.activeStreams.size > 0; + } + + // Waits for all the streams to finish + public async waitForAllStreams(timeout: number = 30_000): Promise { + if (this.activeStreams.size === 0) { + return; + } + + const promises = Array.from(this.activeStreams.values()); + + try { + await Promise.race([ + Promise.allSettled(promises), + new Promise((resolve, _) => setTimeout(() => resolve(), timeout)), + ]); + } catch (error) { + console.error("Error waiting for streams to finish:", error); + + // If we time out, abort all remaining streams + for (const [key, promise] of this.activeStreams.entries()) { + // We can add abort logic here if needed + this.activeStreams.delete(key); + } + throw error; + } + } + public async flush(requestOptions?: ApiRequestOptions): Promise { const runId = taskContext.ctx?.run.id; diff --git a/packages/core/src/v3/runMetadata/metadataStream.ts b/packages/core/src/v3/runMetadata/metadataStream.ts new file mode 100644 index 0000000000..d69fd22e2f --- /dev/null +++ b/packages/core/src/v3/runMetadata/metadataStream.ts @@ -0,0 +1,86 @@ +export type MetadataOptions = { + baseUrl: string; + runId: string; + key: string; + iterator: AsyncIterator; + signal?: AbortSignal; +}; + +export class MetadataStream { + private controller = new AbortController(); + private serverQueue: Array>> = []; + private consumerQueue: Array>> = []; + private serverIterator: AsyncIterator; + private consumerIterator: AsyncIterator; + private streamPromise: Promise; + + constructor(private options: MetadataOptions) { + const { serverIterator, consumerIterator } = this.createTeeIterators(); + this.serverIterator = serverIterator; + this.consumerIterator = consumerIterator; + + this.streamPromise = this.initializeServerStream(); + } + + private createTeeIterators() { + const teeIterator = (queue: Array>>): AsyncIterator => ({ + next: () => { + if (queue.length === 0) { + const result = this.options.iterator.next(); + this.serverQueue.push(result); + this.consumerQueue.push(result); + } + return queue.shift()!; + }, + }); + + return { + serverIterator: teeIterator(this.serverQueue), + consumerIterator: teeIterator(this.consumerQueue), + }; + } + + private initializeServerStream(): Promise { + const serverIterator = this.serverIterator; + + // TODO: Why is this only sending stuff to the server at the end of the run? 
+ const serverStream = new ReadableStream({ + async pull(controller) { + try { + const { value, done } = await serverIterator.next(); + if (done) { + controller.close(); + return; + } + + controller.enqueue(JSON.stringify(value) + "\n"); + } catch (err) { + controller.error(err); + } + }, + cancel: () => this.controller.abort(), + }); + + return fetch( + `${this.options.baseUrl}/realtime/v1/streams/${this.options.runId}/${this.options.key}`, + { + method: "POST", + headers: {}, + body: serverStream, + // @ts-expect-error + duplex: "half", + signal: this.controller.signal, + } + ).catch((error) => { + console.error("Error in stream:", error); + }); + } + + public async wait(): Promise { + return this.streamPromise.then(() => void 0); + } + + public [Symbol.asyncIterator]() { + return this.consumerIterator; + } +} diff --git a/packages/core/src/v3/runMetadata/noopManager.ts b/packages/core/src/v3/runMetadata/noopManager.ts index d054844ab1..3eb23692d6 100644 --- a/packages/core/src/v3/runMetadata/noopManager.ts +++ b/packages/core/src/v3/runMetadata/noopManager.ts @@ -3,6 +3,9 @@ import { ApiRequestOptions } from "../zodfetch.js"; import type { RunMetadataManager } from "./types.js"; export class NoopRunMetadataManager implements RunMetadataManager { + stream(key: string, value: AsyncIterable): Promise> { + throw new Error("Method not implemented."); + } flush(requestOptions?: ApiRequestOptions): Promise { throw new Error("Method not implemented."); } diff --git a/packages/core/src/v3/runMetadata/types.ts b/packages/core/src/v3/runMetadata/types.ts index c827b4a15a..3ef649df8f 100644 --- a/packages/core/src/v3/runMetadata/types.ts +++ b/packages/core/src/v3/runMetadata/types.ts @@ -10,4 +10,5 @@ export interface RunMetadataManager { deleteKey(key: string): void; update(metadata: Record): void; flush(requestOptions?: ApiRequestOptions): Promise; + stream(key: string, value: AsyncIterable, signal?: AbortSignal): Promise>; } diff --git a/packages/core/src/v3/runtime/devRuntimeManager.ts b/packages/core/src/v3/runtime/devRuntimeManager.ts index 07a12a83ba..07e933edf4 100644 --- a/packages/core/src/v3/runtime/devRuntimeManager.ts +++ b/packages/core/src/v3/runtime/devRuntimeManager.ts @@ -1,11 +1,10 @@ import { BatchTaskRunExecutionResult, TaskRunContext, - TaskRunExecution, TaskRunExecutionResult, } from "../schemas/index.js"; -import { RuntimeManager } from "./manager.js"; import { unboundedTimeout } from "../utils/timers.js"; +import { RuntimeManager } from "./manager.js"; export class DevRuntimeManager implements RuntimeManager { _taskWaits: Map void }> = new Map(); diff --git a/packages/core/src/v3/utils/getEnv.ts b/packages/core/src/v3/utils/getEnv.ts index 65ddee39df..47c3b4bfc2 100644 --- a/packages/core/src/v3/utils/getEnv.ts +++ b/packages/core/src/v3/utils/getEnv.ts @@ -1,10 +1,10 @@ -export function getEnvVar(name: string): string | undefined { +export function getEnvVar(name: string, defaultValue?: string): string | undefined { // This could run in a non-Node.js environment (Bun, Deno, CF Worker, etc.), so don't just assume process.env is a thing if (typeof process !== "undefined" && typeof process.env === "object" && process.env !== null) { - return process.env[name]; + return process.env[name] ?? 
defaultValue; } - return; + return defaultValue; } export function getNumberEnvVar(name: string, defaultValue?: number): number | undefined { diff --git a/packages/core/src/v3/utils/globals.ts b/packages/core/src/v3/utils/globals.ts index d24ffd9554..ba4c09a1a2 100644 --- a/packages/core/src/v3/utils/globals.ts +++ b/packages/core/src/v3/utils/globals.ts @@ -7,6 +7,7 @@ import { TaskCatalog } from "../task-catalog/catalog.js"; import { TaskContext } from "../taskContext/types.js"; import { TimeoutManager } from "../timeout/types.js"; import { UsageManager } from "../usage/types.js"; +import { WaitUntilManager } from "../waitUntil/types.js"; import { _globalThis } from "./platform.js"; const GLOBAL_TRIGGER_DOT_DEV_KEY = Symbol.for(`dev.trigger.ts.api`); @@ -59,4 +60,5 @@ type TriggerDotDevGlobalAPI = { ["api-client"]?: ApiClientConfiguration; ["run-metadata"]?: RunMetadataManager; ["timeout"]?: TimeoutManager; + ["wait-until"]?: WaitUntilManager; }; diff --git a/packages/core/src/v3/wait-until-api.ts b/packages/core/src/v3/wait-until-api.ts new file mode 100644 index 0000000000..67a6e3aaac --- /dev/null +++ b/packages/core/src/v3/wait-until-api.ts @@ -0,0 +1,5 @@ +// Split module-level variable definition into separate files to allow +// tree-shaking on each api instance. +import { WaitUntilAPI } from "./waitUntil/index.js"; + +export const waitUntil = WaitUntilAPI.getInstance(); diff --git a/packages/core/src/v3/waitUntil/index.ts b/packages/core/src/v3/waitUntil/index.ts new file mode 100644 index 0000000000..2a0686850a --- /dev/null +++ b/packages/core/src/v3/waitUntil/index.ts @@ -0,0 +1,54 @@ +import { getGlobal, registerGlobal } from "../utils/globals.js"; +import { MaybeDeferredPromise, WaitUntilManager } from "./types.js"; + +const API_NAME = "wait-until"; + +class NoopManager implements WaitUntilManager { + register(promise: MaybeDeferredPromise): void { + // noop + } + + blockUntilSettled(timeout: number): Promise { + return Promise.resolve(); + } + + requiresResolving(): boolean { + return false; + } +} + +const NOOP_MANAGER = new NoopManager(); + +export class WaitUntilAPI implements WaitUntilManager { + private static _instance?: WaitUntilAPI; + + private constructor() {} + + public static getInstance(): WaitUntilAPI { + if (!this._instance) { + this._instance = new WaitUntilAPI(); + } + + return this._instance; + } + + setGlobalManager(manager: WaitUntilManager): boolean { + return registerGlobal(API_NAME, manager); + } + + #getManager(): WaitUntilManager { + return getGlobal(API_NAME) ?? 
NOOP_MANAGER; + } + + register(promise: MaybeDeferredPromise): void { + return this.#getManager().register(promise); + } + + blockUntilSettled(timeout: number): Promise { + return this.#getManager().blockUntilSettled(timeout); + } + + requiresResolving(): boolean { + return this.#getManager().requiresResolving(); + } +} diff --git a/packages/core/src/v3/waitUntil/manager.ts b/packages/core/src/v3/waitUntil/manager.ts new file mode 100644 index 0000000000..b58f518abb --- /dev/null +++ b/packages/core/src/v3/waitUntil/manager.ts @@ -0,0 +1,34 @@ +import { MaybeDeferredPromise, WaitUntilManager } from "./types.js"; + +export class StandardWaitUntilManager implements WaitUntilManager { + private maybeDeferredPromises: Set = new Set(); + + register(promise: MaybeDeferredPromise): void { + this.maybeDeferredPromises.add(promise); + } + + async blockUntilSettled(timeout: number): Promise { + if (this.promisesRequringResolving.length === 0) { + return; + } + + const promises = this.promisesRequringResolving.map((p) => + typeof p.promise === "function" ? p.promise() : p.promise + ); + + await Promise.race([ + Promise.allSettled(promises), + new Promise((resolve, _) => setTimeout(() => resolve(), timeout)), + ]); + + this.maybeDeferredPromises.clear(); + } + + requiresResolving(): boolean { + return this.promisesRequringResolving.length > 0; + } + + private get promisesRequringResolving(): MaybeDeferredPromise[] { + return Array.from(this.maybeDeferredPromises).filter((p) => p.requiresResolving()); + } +} diff --git a/packages/core/src/v3/waitUntil/types.ts b/packages/core/src/v3/waitUntil/types.ts new file mode 100644 index 0000000000..e142b31bec --- /dev/null +++ b/packages/core/src/v3/waitUntil/types.ts @@ -0,0 +1,10 @@ +export type MaybeDeferredPromise = { + requiresResolving(): boolean; + promise: Promise | (() => Promise); +}; + +export interface WaitUntilManager { + register(promise: MaybeDeferredPromise): void; + blockUntilSettled(timeout: number): Promise; + requiresResolving(): boolean; +} diff --git a/packages/core/src/v3/workers/index.ts b/packages/core/src/v3/workers/index.ts index 0323c223da..504302dde2 100644 --- a/packages/core/src/v3/workers/index.ts +++ b/packages/core/src/v3/workers/index.ts @@ -15,3 +15,4 @@ export { DevUsageManager } from "../usage/devUsageManager.js"; export { ProdUsageManager, type ProdUsageManagerOptions } from "../usage/prodUsageManager.js"; export { UsageTimeoutManager } from "../timeout/usageTimeoutManager.js"; export { StandardMetadataManager } from "../runMetadata/manager.js"; +export { StandardWaitUntilManager } from "../waitUntil/manager.js"; diff --git a/packages/core/src/v3/workers/taskExecutor.ts b/packages/core/src/v3/workers/taskExecutor.ts index e927db9612..6987fe8d80 100644 --- a/packages/core/src/v3/workers/taskExecutor.ts +++ b/packages/core/src/v3/workers/taskExecutor.ts @@ -3,7 +3,7 @@ import { VERSION } from "../../version.js"; import { ApiError, RateLimitError } from "../apiClient/errors.js"; import { ConsoleInterceptor } from "../consoleInterceptor.js"; import { parseError, sanitizeError, TaskPayloadParsedError } from "../errors.js"; -import { runMetadata, TriggerConfig } from "../index.js"; +import { runMetadata, TriggerConfig, waitUntil } from "../index.js"; import { recordSpanException, TracingSDK } from "../otel/index.js"; import { ServerBackgroundWorker, @@ -223,6 +223,7 @@ export class TaskExecutor { } } finally { await this.#callTaskCleanup(parsedPayload, ctx, initOutput, signal); + await this.#blockForWaitUntil(); } }); }, @@ 
-494,6 +495,24 @@ export class TaskExecutor { }); } + async #blockForWaitUntil() { + if (!waitUntil.requiresResolving()) { + return; + } + + return this._tracer.startActiveSpan( + "waitUntil", + async (span) => { + return await waitUntil.blockUntilSettled(30_000); + }, + { + attributes: { + [SemanticInternalAttributes.STYLE_ICON]: "clock", + }, + } + ); + } + async #handleError( execution: TaskRunExecution, error: unknown, diff --git a/packages/core/test/runStream.test.ts b/packages/core/test/runStream.test.ts new file mode 100644 index 0000000000..2ccbef5ab7 --- /dev/null +++ b/packages/core/test/runStream.test.ts @@ -0,0 +1,610 @@ +import { describe, it, expect } from "vitest"; +import { + AnyRunShape, + RunSubscription, + StreamSubscription, + StreamSubscriptionFactory, + type RunShapeProvider, +} from "../src/v3/apiClient/runStream.js"; +import type { SubscribeRunRawShape } from "../src/v3/schemas/api.js"; + +// Test implementations +class TestStreamSubscription implements StreamSubscription { + constructor(private chunks: unknown[]) {} + + async subscribe(onChunk: (chunk: unknown) => Promise): Promise<() => void> { + for (const chunk of this.chunks) { + await onChunk(chunk); + } + return () => {}; + } +} + +class TestStreamSubscriptionFactory implements StreamSubscriptionFactory { + private streams = new Map(); + + setStreamChunks(runId: string, streamKey: string, chunks: unknown[]) { + this.streams.set(`${runId}:${streamKey}`, chunks); + } + + createSubscription(runId: string, streamKey: string): StreamSubscription { + const chunks = this.streams.get(`${runId}:${streamKey}`) ?? []; + return new TestStreamSubscription(chunks); + } +} + +// Create a real test provider that uses an array of shapes +class TestShapeProvider implements RunShapeProvider { + private shapes: SubscribeRunRawShape[]; + private unsubscribed = false; + + constructor(shapes: SubscribeRunRawShape[]) { + this.shapes = shapes; + } + + async onShape(callback: (shape: SubscribeRunRawShape) => Promise): Promise<() => void> { + // Process all shapes immediately + for (const shape of this.shapes) { + if (this.unsubscribed) break; + await callback(shape); + } + + return () => { + this.unsubscribed = true; + }; + } +} + +// Add this new provider that can emit shapes over time +class DelayedTestShapeProvider implements RunShapeProvider { + private shapes: SubscribeRunRawShape[]; + private unsubscribed = false; + private currentShapeIndex = 0; + + constructor(shapes: SubscribeRunRawShape[]) { + this.shapes = shapes; + } + + async onShape(callback: (shape: SubscribeRunRawShape) => Promise): Promise<() => void> { + // Only emit the first shape immediately + if (this.shapes.length > 0) { + await callback(this.shapes[this.currentShapeIndex++]); + } + + // Set up an interval to emit remaining shapes + const interval = setInterval(async () => { + if (this.unsubscribed || this.currentShapeIndex >= this.shapes.length) { + clearInterval(interval); + return; + } + await callback(this.shapes[this.currentShapeIndex++]); + }, 100); + + return () => { + this.unsubscribed = true; + clearInterval(interval); + }; + } +} + +describe("RunSubscription", () => { + it("should handle basic run subscription", async () => { + const shapes = [ + { + id: "123", + friendlyId: "run_123", + taskIdentifier: "test-task", + status: "COMPLETED_SUCCESSFULLY", + createdAt: new Date(), + updatedAt: new Date(), + completedAt: new Date(), + number: 1, + usageDurationMs: 100, + costInCents: 0, + baseCostInCents: 0, + isTest: false, + runTags: [], + }, + ]; + + 
const subscription = new RunSubscription({ + provider: new TestShapeProvider(shapes), + streamFactory: new TestStreamSubscriptionFactory(), + closeOnComplete: true, + }); + + const results = await convertAsyncIterableToArray(subscription); + + expect(results).toHaveLength(1); + expect(results[0]).toMatchObject({ + id: "run_123", + taskIdentifier: "test-task", + status: "COMPLETED", + }); + }); + + it("should handle payload and outputs", async () => { + const shapes: SubscribeRunRawShape[] = [ + { + id: "123", + friendlyId: "run_123", + taskIdentifier: "test-task", + status: "COMPLETED_SUCCESSFULLY", + createdAt: new Date(), + updatedAt: new Date(), + completedAt: new Date(), + number: 1, + usageDurationMs: 100, + costInCents: 0, + baseCostInCents: 0, + isTest: false, + runTags: [], + payload: JSON.stringify({ test: "payload" }), + payloadType: "application/json", + output: JSON.stringify({ test: "output" }), + outputType: "application/json", + }, + ]; + + const subscription = new RunSubscription({ + provider: new TestShapeProvider(shapes), + streamFactory: new TestStreamSubscriptionFactory(), + closeOnComplete: true, + }); + + const results = await convertAsyncIterableToArray(subscription); + + expect(results).toHaveLength(1); + expect(results[0]).toMatchObject({ + id: "run_123", + taskIdentifier: "test-task", + status: "COMPLETED", + payload: { test: "payload" }, + output: { test: "output" }, + }); + }); + + it("should keep stream open when closeOnComplete is false", async () => { + const shapes: SubscribeRunRawShape[] = [ + { + id: "123", + friendlyId: "run_123", + taskIdentifier: "test-task", + status: "EXECUTING", + createdAt: new Date(), + updatedAt: new Date(), + completedAt: new Date(), + number: 1, + usageDurationMs: 100, + costInCents: 0, + baseCostInCents: 0, + isTest: false, + runTags: [], + }, + { + id: "123", + friendlyId: "run_123", + taskIdentifier: "test-task", + status: "COMPLETED_SUCCESSFULLY", + createdAt: new Date(), + updatedAt: new Date(), + completedAt: new Date(), + number: 1, + usageDurationMs: 200, + costInCents: 0, + baseCostInCents: 0, + isTest: false, + runTags: [], + }, + ]; + + const subscription = new RunSubscription({ + provider: new DelayedTestShapeProvider(shapes), + streamFactory: new TestStreamSubscriptionFactory(), + closeOnComplete: false, + }); + + // Collect 2 results + const results = await collectNResults(subscription, 2); + + expect(results).toHaveLength(2); + expect(results[0]).toMatchObject({ + id: "run_123", + taskIdentifier: "test-task", + status: "EXECUTING", + }); + expect(results[1]).toMatchObject({ + id: "run_123", + taskIdentifier: "test-task", + status: "COMPLETED", + }); + }); +}); + +describe("RunSubscription withStreams", () => { + it("should handle stream data", async () => { + const streamFactory = new TestStreamSubscriptionFactory(); + + // Set up test chunks + streamFactory.setStreamChunks("run_123", "openai", [ + { id: "chunk1", content: "Hello" }, + { id: "chunk2", content: "World" }, + ]); + + const shapes = [ + { + id: "123", + friendlyId: "run_123", + taskIdentifier: "openai-streaming", + status: "EXECUTING", + createdAt: new Date(), + updatedAt: new Date(), + number: 1, + usageDurationMs: 100, + costInCents: 0, + baseCostInCents: 0, + isTest: false, + runTags: [], + metadata: JSON.stringify({ + "$$stream.openai": "openai", + }), + metadataType: "application/json", + }, + ]; + + const subscription = new RunSubscription({ + provider: new TestShapeProvider(shapes), + streamFactory, + }); + + const results = await 
collectNResults( + subscription.withStreams<{ openai: { id: string; content: string } }>(), + 3 // 1 run + 2 stream chunks + ); + + expect(results).toHaveLength(3); + expect(results[0]).toMatchObject({ + type: "run", + run: { id: "run_123", taskIdentifier: "openai-streaming", status: "EXECUTING" }, + }); + expect(results[1]).toMatchObject({ + type: "openai", + chunk: { id: "chunk1", content: "Hello" }, + run: { id: "run_123", taskIdentifier: "openai-streaming", status: "EXECUTING" }, + }); + expect(results[2]).toMatchObject({ + type: "openai", + chunk: { id: "chunk2", content: "World" }, + run: { id: "run_123", taskIdentifier: "openai-streaming", status: "EXECUTING" }, + }); + }); + + it("should only create one stream for multiple runs of the same id", async () => { + const streamFactory = new TestStreamSubscriptionFactory(); + let streamCreationCount = 0; + + // Override createSubscription to count calls + const originalCreate = streamFactory.createSubscription.bind(streamFactory); + streamFactory.createSubscription = (runId: string, streamKey: string) => { + streamCreationCount++; + return originalCreate(runId, streamKey); + }; + + // Set up test chunks + streamFactory.setStreamChunks("run_123", "openai", [ + { id: "chunk1", content: "Hello" }, + { id: "chunk2", content: "World" }, + ]); + + const shapes = [ + // First run update + { + id: "123", + friendlyId: "run_123", + taskIdentifier: "openai-streaming", + status: "EXECUTING", + createdAt: new Date(), + updatedAt: new Date(), + number: 1, + usageDurationMs: 100, + costInCents: 0, + baseCostInCents: 0, + isTest: false, + runTags: [], + metadata: JSON.stringify({ + "$$stream.openai": "openai", + }), + metadataType: "application/json", + }, + // Second run update with same stream key + { + id: "123", + friendlyId: "run_123", + taskIdentifier: "openai-streaming", + status: "EXECUTING", + createdAt: new Date(), + updatedAt: new Date(), + number: 1, + usageDurationMs: 200, // Different to show it's a new update + costInCents: 0, + baseCostInCents: 0, + isTest: false, + runTags: [], + metadata: JSON.stringify({ + "$$stream.openai": "openai", + }), + metadataType: "application/json", + }, + ]; + + const subscription = new RunSubscription({ + provider: new TestShapeProvider(shapes), + streamFactory, + }); + + const results = await collectNResults( + subscription.withStreams<{ openai: { id: string; content: string } }>(), + 4 // 2 runs + 2 stream chunks + ); + + // Verify we only created one stream + expect(streamCreationCount).toBe(1); + + // Verify we got all the expected events + expect(results).toHaveLength(4); + expect(results[0]).toMatchObject({ + type: "run", + run: { + id: "run_123", + taskIdentifier: "openai-streaming", + status: "EXECUTING", + durationMs: 100, + }, + }); + expect(results[1]).toMatchObject({ + type: "openai", + chunk: { id: "chunk1", content: "Hello" }, + run: { id: "run_123", durationMs: 100 }, + }); + expect(results[2]).toMatchObject({ + type: "openai", + chunk: { id: "chunk2", content: "World" }, + run: { id: "run_123", durationMs: 100 }, + }); + expect(results[3]).toMatchObject({ + type: "run", + run: { + id: "run_123", + taskIdentifier: "openai-streaming", + status: "EXECUTING", + durationMs: 200, + }, + }); + }); + + it("should handle multiple streams simultaneously", async () => { + const streamFactory = new TestStreamSubscriptionFactory(); + + // Set up test chunks for two different streams + streamFactory.setStreamChunks("run_123", "openai", [ + { id: "openai1", content: "Hello" }, + { id: "openai2", content: 
"World" }, + ]); + streamFactory.setStreamChunks("run_123", "anthropic", [ + { id: "claude1", message: "Hi" }, + { id: "claude2", message: "There" }, + ]); + + const shapes = [ + { + id: "123", + friendlyId: "run_123", + taskIdentifier: "multi-streaming", + status: "EXECUTING", + createdAt: new Date(), + updatedAt: new Date(), + number: 1, + usageDurationMs: 100, + costInCents: 0, + baseCostInCents: 0, + isTest: false, + runTags: [], + metadata: JSON.stringify({ + "$$stream.openai": "openai", + "$$stream.anthropic": "anthropic", + }), + metadataType: "application/json", + }, + ]; + + const subscription = new RunSubscription({ + provider: new TestShapeProvider(shapes), + streamFactory, + }); + + const results = await collectNResults( + subscription.withStreams<{ + openai: { id: string; content: string }; + anthropic: { id: string; message: string }; + }>(), + 5 // 1 run + 2 openai chunks + 2 anthropic chunks + ); + + expect(results).toHaveLength(5); + expect(results[0]).toMatchObject({ + type: "run", + run: { id: "run_123", taskIdentifier: "multi-streaming", status: "EXECUTING" }, + }); + + // Filter and verify openai chunks + const openaiChunks = results.filter((r) => r.type === "openai"); + expect(openaiChunks).toHaveLength(2); + expect(openaiChunks[0]).toMatchObject({ + type: "openai", + chunk: { id: "openai1", content: "Hello" }, + run: { id: "run_123" }, + }); + expect(openaiChunks[1]).toMatchObject({ + type: "openai", + chunk: { id: "openai2", content: "World" }, + run: { id: "run_123" }, + }); + + // Filter and verify anthropic chunks + const anthropicChunks = results.filter((r) => r.type === "anthropic"); + expect(anthropicChunks).toHaveLength(2); + expect(anthropicChunks[0]).toMatchObject({ + type: "anthropic", + chunk: { id: "claude1", message: "Hi" }, + run: { id: "run_123" }, + }); + expect(anthropicChunks[1]).toMatchObject({ + type: "anthropic", + chunk: { id: "claude2", message: "There" }, + run: { id: "run_123" }, + }); + }); + + it("should handle streams that appear in different run updates", async () => { + const streamFactory = new TestStreamSubscriptionFactory(); + + // Set up test chunks for two different streams + streamFactory.setStreamChunks("run_123", "openai", [ + { id: "openai1", content: "Hello" }, + { id: "openai2", content: "World" }, + ]); + streamFactory.setStreamChunks("run_123", "anthropic", [ + { id: "claude1", message: "Hi" }, + { id: "claude2", message: "There" }, + ]); + + const shapes = [ + // First run update - only has openai stream + { + id: "123", + friendlyId: "run_123", + taskIdentifier: "multi-streaming", + status: "EXECUTING", + createdAt: new Date(), + updatedAt: new Date(), + number: 1, + usageDurationMs: 100, + costInCents: 0, + baseCostInCents: 0, + isTest: false, + runTags: [], + metadata: JSON.stringify({ + "$$stream.openai": "openai", + }), + metadataType: "application/json", + }, + // Second run update - adds anthropic stream + { + id: "123", + friendlyId: "run_123", + taskIdentifier: "multi-streaming", + status: "EXECUTING", + createdAt: new Date(), + updatedAt: new Date(), + number: 1, + usageDurationMs: 200, + costInCents: 0, + baseCostInCents: 0, + isTest: false, + runTags: [], + metadata: JSON.stringify({ + "$$stream.openai": "openai", + "$$stream.anthropic": "anthropic", + }), + metadataType: "application/json", + }, + // Final run update - marks as complete + { + id: "123", + friendlyId: "run_123", + taskIdentifier: "multi-streaming", + status: "COMPLETED_SUCCESSFULLY", + createdAt: new Date(), + updatedAt: new Date(), + 
completedAt: new Date(), + number: 1, + usageDurationMs: 300, + costInCents: 0, + baseCostInCents: 0, + isTest: false, + runTags: [], + metadata: JSON.stringify({ + "$$stream.openai": "openai", + "$$stream.anthropic": "anthropic", + }), + metadataType: "application/json", + }, + ]; + + const subscription = new RunSubscription({ + provider: new TestShapeProvider(shapes), + streamFactory, + closeOnComplete: true, + }); + + const results = await collectNResults( + subscription.withStreams<{ + openai: { id: string; content: string }; + anthropic: { id: string; message: string }; + }>(), + 7 // 3 runs + 2 openai chunks + 2 anthropic chunks + ); + + expect(results).toHaveLength(7); + + // Verify run updates + const runUpdates = results.filter((r) => r.type === "run"); + expect(runUpdates).toHaveLength(3); + expect(runUpdates[2].run.status).toBe("COMPLETED"); + + // Verify openai chunks + const openaiChunks = results.filter((r) => r.type === "openai"); + expect(openaiChunks).toHaveLength(2); + + // Verify anthropic chunks + const anthropicChunks = results.filter((r) => r.type === "anthropic"); + expect(anthropicChunks).toHaveLength(2); + }); +}); + +export async function convertAsyncIterableToArray(iterable: AsyncIterable): Promise { + const result: T[] = []; + for await (const item of iterable) { + result.push(item); + } + return result; +} + +async function collectNResults( + iterable: AsyncIterable, + count: number, + timeoutMs: number = 1000 +): Promise { + const results: T[] = []; + const promise = new Promise((resolve) => { + (async () => { + for await (const result of iterable) { + results.push(result); + if (results.length === count) { + resolve(results); + break; + } + } + })(); + }); + + return Promise.race([ + promise, + new Promise((_, reject) => + setTimeout( + () => reject(new Error(`Timeout waiting for ${count} results after ${timeoutMs}ms`)), + timeoutMs + ) + ), + ]); +} diff --git a/packages/trigger-sdk/src/v3/index.ts b/packages/trigger-sdk/src/v3/index.ts index f88503218a..cc899eab85 100644 --- a/packages/trigger-sdk/src/v3/index.ts +++ b/packages/trigger-sdk/src/v3/index.ts @@ -4,11 +4,13 @@ export { retry, type RetryOptions } from "./retry.js"; export { queue } from "./shared.js"; export * from "./tasks.js"; export * from "./wait.js"; +export * from "./waitUntil.js"; export * from "./usage.js"; export * from "./idempotencyKeys.js"; export * from "./tags.js"; export * from "./metadata.js"; export * from "./timeout.js"; +export * from "./waitUntil.js"; export type { Context }; import type { Context } from "./shared.js"; diff --git a/packages/trigger-sdk/src/v3/metadata.ts b/packages/trigger-sdk/src/v3/metadata.ts index 2e081f4bf9..4ca12b0200 100644 --- a/packages/trigger-sdk/src/v3/metadata.ts +++ b/packages/trigger-sdk/src/v3/metadata.ts @@ -25,6 +25,7 @@ export const metadata = { save: saveMetadata, replace: replaceMetadata, flush: flushMetadata, + stream: stream, }; export type RunMetadata = Record; @@ -123,3 +124,11 @@ async function flushMetadata(requestOptions?: ApiRequestOptions): Promise await runMetadata.flush($requestOptions); } + +async function stream( + key: string, + value: AsyncIterable, + signal?: AbortSignal +): Promise> { + return runMetadata.stream(key, value, signal); +} diff --git a/packages/trigger-sdk/src/v3/waitUntil.ts b/packages/trigger-sdk/src/v3/waitUntil.ts new file mode 100644 index 0000000000..615fc67639 --- /dev/null +++ b/packages/trigger-sdk/src/v3/waitUntil.ts @@ -0,0 +1,13 @@ +import { waitUntil as core_waitUntil } from 
"@trigger.dev/core/v3"; + +/** + * waitUntil extends the lifetime of a task run until the provided promise settles. + * You can use this function to ensure that a task run does not complete until the promise resolves or rejects. + * + * Useful if you need to make sure something happens but you wait to continue doing other work in the task run. + * + * @param promise - The promise to wait for. + */ +export function waitUntil(promise: Promise) { + return core_waitUntil.register({ promise, requiresResolving: () => true }); +} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 337ffd1c43..7f3712c31c 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1576,6 +1576,9 @@ importers: next: specifier: 14.2.15 version: 14.2.15(@playwright/test@1.37.0)(react-dom@18.2.0)(react@18.3.1) + openai: + specifier: ^4.68.4 + version: 4.68.4(zod@3.22.3) react: specifier: ^18 version: 18.3.1 @@ -24302,6 +24305,27 @@ packages: - encoding dev: false + /openai@4.68.4(zod@3.22.3): + resolution: {integrity: sha512-LRinV8iU9VQplkr25oZlyrsYGPGasIwYN8KFMAAFTHHLHjHhejtJ5BALuLFrkGzY4wfbKhOhuT+7lcHZ+F3iEA==} + hasBin: true + peerDependencies: + zod: ^3.23.8 + peerDependenciesMeta: + zod: + optional: true + dependencies: + '@types/node': 18.19.20 + '@types/node-fetch': 2.6.4 + abort-controller: 3.0.0 + agentkeepalive: 4.5.0 + form-data-encoder: 1.7.2 + formdata-node: 4.4.1 + node-fetch: 2.6.12 + zod: 3.22.3 + transitivePeerDependencies: + - encoding + dev: false + /openid-client@5.6.4: resolution: {integrity: sha512-T1h3B10BRPKfcObdBklX639tVz+xh34O7GjofqrqiAQdm7eHsQ00ih18x6wuJ/E6FxdtS2u3FmUGPDeEcMwzNA==} requiresBuild: true diff --git a/references/nextjs-realtime/package.json b/references/nextjs-realtime/package.json index 22896709c7..3d60cfcaa9 100644 --- a/references/nextjs-realtime/package.json +++ b/references/nextjs-realtime/package.json @@ -21,6 +21,7 @@ "clsx": "^2.1.1", "lucide-react": "^0.451.0", "next": "14.2.15", + "openai": "^4.68.4", "react": "^18", "react-dom": "^18", "tailwind-merge": "^2.5.3", diff --git a/references/nextjs-realtime/src/trigger/ai.ts b/references/nextjs-realtime/src/trigger/ai.ts new file mode 100644 index 0000000000..ef69ebbef2 --- /dev/null +++ b/references/nextjs-realtime/src/trigger/ai.ts @@ -0,0 +1,75 @@ +import { OpenAI } from "openai"; +import { runs, logger, metadata, schemaTask, task, waitUntil } from "@trigger.dev/sdk/v3"; +import { z } from "zod"; +import { setTimeout } from "node:timers/promises"; + +const openai = new OpenAI(); + +export const openaiStreaming = schemaTask({ + id: "openai-streaming", + schema: z.object({ + model: z.string().default("chatgpt-4o-latest"), + prompt: z.string().default("Hello, how are you?"), + }), + run: async ({ model, prompt }) => { + logger.info("Running OpenAI model", { model, prompt }); + + const result = await openai.chat.completions.create({ + model: model, + messages: [ + { + role: "system", + content: prompt, + }, + ], + stream: true, + }); + + const stream = await metadata.stream("openai", result); + + for await (const chunk of stream) { + } + + await setTimeout(1000); + }, +}); + +type STREAMS = { openai: OpenAI.Chat.Completions.ChatCompletionChunk }; + +export const openaiConsumer = schemaTask({ + id: "openai-consumer", + schema: z.object({ + model: z.string().default("gpt-3.5-turbo"), + prompt: z.string().default("Hello, how are you?"), + }), + run: async ({ model, prompt }) => { + const handle = await openaiStreaming.trigger({ model, prompt }); + + let openaiCompletion = ""; + + for await (const part of 
runs.subscribeToRun(handle).withStreams()) { + switch (part.type) { + case "run": { + logger.info("Received run chunk", { run: part.run }); + break; + } + case "openai": { + logger.info("Received OpenAI chunk", { chunk: part.chunk, run: part.run }); + + if (part.chunk.choices[0].delta?.content) { + openaiCompletion += part.chunk.choices[0].delta.content; + } + } + } + } + + return { openaiCompletion }; + }, +}); + +export const waitUntilExamples = task({ + id: "wait-until-examples", + run: async () => { + await setTimeout(30_000); + }, +}); diff --git a/references/nextjs-realtime/trigger.config.ts b/references/nextjs-realtime/trigger.config.ts index 9820fb2223..4dcd27b930 100644 --- a/references/nextjs-realtime/trigger.config.ts +++ b/references/nextjs-realtime/trigger.config.ts @@ -1,6 +1,6 @@ import { defineConfig } from "@trigger.dev/sdk/v3"; export default defineConfig({ - project: "proj_bzhdaqhlymtuhlrcgbqy", + project: "proj_xyxzzpnujsnhjiskihvs", dirs: ["./src/trigger"], }); diff --git a/stream_fetch_test.js b/stream_fetch_test.js new file mode 100644 index 0000000000..86ad7cba62 --- /dev/null +++ b/stream_fetch_test.js @@ -0,0 +1,125 @@ +class MetadataStream { + constructor(options) { + this.options = options; + this.controller = new AbortController(); + this.serverQueue = []; + this.consumerQueue = []; + + const { serverIterator, consumerIterator } = this.createTeeIterators(); + this.serverIterator = serverIterator; + this.consumerIterator = consumerIterator; + + this.streamPromise = this.initializeServerStream(); + } + + createTeeIterators() { + const teeIterator = (queue) => ({ + next: () => { + if (queue.length === 0) { + const result = this.options.iterator.next(); + this.serverQueue.push(result); + this.consumerQueue.push(result); + } + return queue.shift(); + }, + }); + + return { + serverIterator: teeIterator(this.serverQueue), + consumerIterator: teeIterator(this.consumerQueue), + }; + } + + initializeServerStream() { + const serverIterator = this.serverIterator; + + const serverStream = new ReadableStream({ + async pull(controller) { + try { + const { value, done } = await serverIterator.next(); + if (done) { + controller.close(); + return; + } + + console.log("Server sent:", value, new Date().toISOString()); + + controller.enqueue(JSON.stringify(value) + "\n"); + } catch (err) { + controller.error(err); + } + }, + cancel: () => this.controller.abort(), + }); + + return fetch( + `${this.options.baseUrl}/realtime/v1/streams/${this.options.runId}/${this.options.key}`, + { + method: "POST", + headers: {}, + body: serverStream, + duplex: "half", + signal: this.controller.signal, + } + ).catch((error) => { + console.error("Error in stream:", error); + }); + } + + async wait() { + return this.streamPromise.then(() => void 0); + } + + [Symbol.asyncIterator]() { + return this.consumerIterator; + } +} + +// Example usage: +async function* generateSampleData() { + const items = [ + { type: "start", timestamp: Date.now() }, + { type: "progress", value: 25 }, + { type: "progress", value: 50 }, + { type: "progress", value: 75 }, + { type: "complete", timestamp: Date.now() }, + ]; + + for (const item of items) { + await new Promise((resolve) => setTimeout(resolve, 1000)); + yield item; + } +} + +async function runTest() { + const { OpenAI } = require("./references/v3-catalog/node_modules/openai"); + const openai = new OpenAI(); + + const result = await openai.chat.completions.create({ + model: "chatgpt-4o-latest", + messages: [ + { + role: "system", + content: "Can you summarize the 
plot of The Matrix?", + }, + ], + stream: true, + }); + + const stream = new MetadataStream({ + baseUrl: "http://localhost:3030", + runId: "test_run_1234", + key: "openai", + iterator: result[Symbol.asyncIterator](), + }); + + // Consume the stream + // for await (const value of stream) { + // console.log("Consumer received:", value, new Date().toISOString()); + // } + + await stream.wait(); + console.log("Stream completed", new Date().toISOString()); +} + +runTest().catch(console.error); diff --git a/stream_test.js b/stream_test.js new file mode 100644 index 0000000000..32ed362963 --- /dev/null +++ b/stream_test.js @@ -0,0 +1,41 @@ +const http = require("http"); + +const options = { + hostname: "localhost", + port: 3030, + path: "/realtime/v1/streams/test", + method: "POST", + headers: { + "Content-Type": "application/x-ndjson", + "Transfer-Encoding": "chunked", // Enable chunked transfer encoding + }, +}; + +const req = http.request(options, (res) => { + console.log(`STATUS: ${res.statusCode}`); + res.on("data", () => {}); + res.on("end", () => { + console.log("No more data in response."); + }); +}); + +req.on("error", (e) => { + console.error(`Problem with request: ${e.message}`); +}); + +// Function to send data with a delay +const sendData = (message, delay) => { + setTimeout(() => { + console.log(`Sending: ${message}`); + req.write(message + "\n"); + }, delay); +}; + +sendData('{"message": "chunk 1"}', 0); +sendData('{"message": "chunk 2"}', 1000); +sendData('{"message": "chunk 3"}', 2000); +sendData('{"message": "chunk 4"}', 3000); + +setTimeout(() => { + req.end(); +}, 4000); diff --git a/stream_test.sh b/stream_test.sh new file mode 100755 index 0000000000..914f728c3d --- /dev/null +++ b/stream_test.sh @@ -0,0 +1,13 @@ +#!/bin/zsh + +( + echo -n '{"message": "chunk 1"}\n' + sleep 1 + echo -n '{"message": "chunk 2"}\n' + sleep 1 + echo -n '{"message": "chunk 3"}\n' + sleep 1 + echo -n '{"message": "chunk 4"}\n' +) | curl -v -X POST "http://localhost:3030/realtime/v1/streams/express/test" \ + -H "Content-Type: application/x-ndjson" \ + --data-binary @- \ No newline at end of file From d39d272114844cdc60a2af330be6250c17c90ab9 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Fri, 1 Nov 2024 13:21:32 +0000 Subject: [PATCH 02/31] Handle realtime with large payloads or outputs #1451 --- apps/webapp/app/routes/api.v1.packets.$.ts | 42 +++++++++---------- packages/core/src/v3/apiClient/index.ts | 3 ++ packages/core/src/v3/apiClient/runStream.ts | 5 ++- packages/core/src/v3/utils/ioSerialization.ts | 23 ++++++---- .../src/app/runs/[id]/ClientRunDetails.tsx | 4 +- .../src/components/RunDetails.tsx | 4 +- .../nextjs-realtime/src/trigger/example.ts | 5 ++- references/nextjs-realtime/trigger.config.ts | 2 +- 8 files changed, 52 insertions(+), 36 deletions(-) diff --git a/apps/webapp/app/routes/api.v1.packets.$.ts b/apps/webapp/app/routes/api.v1.packets.$.ts index 9cb72d30eb..d88773d941 100644 --- a/apps/webapp/app/routes/api.v1.packets.$.ts +++ b/apps/webapp/app/routes/api.v1.packets.$.ts @@ -2,6 +2,7 @@ import type { ActionFunctionArgs } from "@remix-run/server-runtime"; import { json } from "@remix-run/server-runtime"; import { z } from "zod"; import { authenticateApiRequest } from "~/services/apiAuth.server"; +import { createLoaderApiRoute } from "~/services/routeBuiilders/apiBuilder.server"; import { generatePresignedUrl } from "~/v3/r2.server"; const ParamsSchema = z.object({ @@ -39,28 +40,27 @@ export async function action({ request, params }: ActionFunctionArgs) { return json({ 
presignedUrl }); } -export async function loader({ request, params }: ActionFunctionArgs) { - // Next authenticate the request - const authenticationResult = await authenticateApiRequest(request); +export const loader = createLoaderApiRoute( + { + params: ParamsSchema, + allowJWT: true, + corsStrategy: "all", + }, + async ({ params, authentication }) => { + const filename = params["*"]; - if (!authenticationResult) { - return json({ error: "Invalid or Missing API key" }, { status: 401 }); - } + const presignedUrl = await generatePresignedUrl( + authentication.environment.project.externalRef, + authentication.environment.slug, + filename, + "GET" + ); - const parsedParams = ParamsSchema.parse(params); - const filename = parsedParams["*"]; - - const presignedUrl = await generatePresignedUrl( - authenticationResult.environment.project.externalRef, - authenticationResult.environment.slug, - filename, - "GET" - ); + if (!presignedUrl) { + return json({ error: "Failed to generate presigned URL" }, { status: 500 }); + } - if (!presignedUrl) { - return json({ error: "Failed to generate presigned URL" }, { status: 500 }); + // Caller can now use this URL to fetch that object. + return json({ presignedUrl }); } - - // Caller can now use this URL to fetch that object. - return json({ presignedUrl }); -} +); diff --git a/packages/core/src/v3/apiClient/index.ts b/packages/core/src/v3/apiClient/index.ts index e0fd38e895..09275c0e1f 100644 --- a/packages/core/src/v3/apiClient/index.ts +++ b/packages/core/src/v3/apiClient/index.ts @@ -598,6 +598,7 @@ export class ApiClient { return runShapeStream(`${this.baseUrl}/realtime/v1/runs/${runId}`, { closeOnComplete: true, headers: this.#getRealtimeHeaders(), + client: this, }); } @@ -611,6 +612,7 @@ export class ApiClient { { closeOnComplete: false, headers: this.#getRealtimeHeaders(), + client: this, } ); } @@ -619,6 +621,7 @@ export class ApiClient { return runShapeStream(`${this.baseUrl}/realtime/v1/batches/${batchId}`, { closeOnComplete: false, headers: this.#getRealtimeHeaders(), + client: this, }); } diff --git a/packages/core/src/v3/apiClient/runStream.ts b/packages/core/src/v3/apiClient/runStream.ts index 2ecbca9181..6769235b2e 100644 --- a/packages/core/src/v3/apiClient/runStream.ts +++ b/packages/core/src/v3/apiClient/runStream.ts @@ -8,6 +8,7 @@ import { IOPacket, parsePacket, } from "../utils/ioSerialization.js"; +import { ApiClient } from "./index.js"; import { AsyncIterableStream, createAsyncIterableStream, zodShapeStream } from "./stream.js"; export type RunShape = TRunTypes extends AnyRunTypes @@ -50,6 +51,7 @@ export type RunShapeStreamOptions = { fetchClient?: typeof fetch; closeOnComplete?: boolean; signal?: AbortSignal; + client?: ApiClient; }; export type StreamPartResult> = { @@ -84,6 +86,7 @@ export function runShapeStream( signal: options?.signal, } ), + ...options, }; return new RunSubscription($options); @@ -306,7 +309,7 @@ export class RunSubscription { return cachedResult; } - const result = await conditionallyImportAndParsePacket(packet); + const result = await conditionallyImportAndParsePacket(packet, this.options.client); this.packetCache.set(`${row.friendlyId}/${key}`, result); return result; diff --git a/packages/core/src/v3/utils/ioSerialization.ts b/packages/core/src/v3/utils/ioSerialization.ts index c16d07d61d..b1a8d4587f 100644 --- a/packages/core/src/v3/utils/ioSerialization.ts +++ b/packages/core/src/v3/utils/ioSerialization.ts @@ -7,6 +7,7 @@ import { apiClientManager } from "../apiClientManager-api.js"; import { zodfetch 
} from "../zodfetch.js"; import { z } from "zod"; import type { RetryOptions } from "../schemas/index.js"; +import { ApiClient } from "../apiClient/index.js"; export type IOPacket = { data?: string | undefined; @@ -36,8 +37,11 @@ export async function parsePacket(value: IOPacket): Promise { } } -export async function conditionallyImportAndParsePacket(value: IOPacket): Promise { - const importedPacket = await conditionallyImportPacket(value); +export async function conditionallyImportAndParsePacket( + value: IOPacket, + client?: ApiClient +): Promise { + const importedPacket = await conditionallyImportPacket(value, undefined, client); return await parsePacket(importedPacket); } @@ -159,19 +163,20 @@ async function exportPacket(packet: IOPacket, pathPrefix: string): Promise { if (packet.dataType !== "application/store") { return packet; } if (!tracer) { - return await importPacket(packet); + return await importPacket(packet, undefined, client); } else { const result = await tracer.startActiveSpan( "store.downloadPayload", async (span) => { - return await importPacket(packet, span); + return await importPacket(packet, span, client); }, { attributes: { @@ -209,16 +214,18 @@ export async function resolvePresignedPacketUrl( } } -async function importPacket(packet: IOPacket, span?: Span): Promise { +async function importPacket(packet: IOPacket, span?: Span, client?: ApiClient): Promise { if (!packet.data) { return packet; } - if (!apiClientManager.client) { + const $client = client ?? apiClientManager.client; + + if (!$client) { return packet; } - const presignedResponse = await apiClientManager.client.getPayloadUrl(packet.data); + const presignedResponse = await $client.getPayloadUrl(packet.data); const response = await zodfetch(z.any(), presignedResponse.presignedUrl, undefined, { retry: ioRetryOptions, diff --git a/references/nextjs-realtime/src/app/runs/[id]/ClientRunDetails.tsx b/references/nextjs-realtime/src/app/runs/[id]/ClientRunDetails.tsx index 486ca238e0..2ee2841cab 100644 --- a/references/nextjs-realtime/src/app/runs/[id]/ClientRunDetails.tsx +++ b/references/nextjs-realtime/src/app/runs/[id]/ClientRunDetails.tsx @@ -2,11 +2,11 @@ import RunDetails from "@/components/RunDetails"; import { Card, CardContent } from "@/components/ui/card"; -import { TriggerAuthContext, useRun } from "@trigger.dev/react-hooks"; +import { TriggerAuthContext, useRealtimeRun } from "@trigger.dev/react-hooks"; import type { exampleTask } from "@/trigger/example"; function RunDetailsWrapper({ runId }: { runId: string }) { - const { run, error } = useRun(runId, { refreshInterval: 1000 }); + const { run, error } = useRealtimeRun(runId); if (error) { return ( diff --git a/references/nextjs-realtime/src/components/RunDetails.tsx b/references/nextjs-realtime/src/components/RunDetails.tsx index 1f8b094865..64deb8940a 100644 --- a/references/nextjs-realtime/src/components/RunDetails.tsx +++ b/references/nextjs-realtime/src/components/RunDetails.tsx @@ -2,7 +2,7 @@ import { Badge } from "@/components/ui/badge"; import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; import { ScrollArea } from "@/components/ui/scroll-area"; import { exampleTask } from "@/trigger/example"; -import type { RetrieveRunResult } from "@trigger.dev/sdk/v3"; +import type { TaskRunShape } from "@trigger.dev/sdk/v3"; import { AlertTriangleIcon, CheckCheckIcon, XIcon } from "lucide-react"; function formatDate(date: Date | undefined) { @@ -17,7 +17,7 @@ function JsonDisplay({ data }: { data: any }) { ); } -export default 
function RunDetails({ record }: { record: RetrieveRunResult }) { +export default function RunDetails({ record }: { record: TaskRunShape }) { return ( diff --git a/references/nextjs-realtime/src/trigger/example.ts b/references/nextjs-realtime/src/trigger/example.ts index 90b8590570..031888d187 100644 --- a/references/nextjs-realtime/src/trigger/example.ts +++ b/references/nextjs-realtime/src/trigger/example.ts @@ -23,6 +23,9 @@ export const exampleTask = schemaTask({ metadata.set("status", { type: "finished", progress: 1.0 }); - return { message: "All good here!" }; + // Generate a return payload that is more than 128KB + const bigPayload = new Array(100000).fill("a".repeat(10)).join(""); + + return { message: bigPayload }; }, }); diff --git a/references/nextjs-realtime/trigger.config.ts b/references/nextjs-realtime/trigger.config.ts index 4dcd27b930..9820fb2223 100644 --- a/references/nextjs-realtime/trigger.config.ts +++ b/references/nextjs-realtime/trigger.config.ts @@ -1,6 +1,6 @@ import { defineConfig } from "@trigger.dev/sdk/v3"; export default defineConfig({ - project: "proj_xyxzzpnujsnhjiskihvs", + project: "proj_bzhdaqhlymtuhlrcgbqy", dirs: ["./src/trigger"], }); From 17adacf491e248c601d31dd1a5127d1257987773 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Fri, 1 Nov 2024 20:23:43 +0000 Subject: [PATCH 03/31] feat: optimize Redis stream handling with batching Add STREAM_ORIGIN to environment schema. Improve performance in RealtimeStreams by using TextDecoderStream for simpler text decoding and implementing batching of XADD commands for Redis streams. Limit stream size using MAXLEN option. Update environment variable repository with new variable type. Adjust import statements for Redis key and value types. --- apps/webapp/app/env.server.ts | 1 + .../app/services/realtimeStreams.server.ts | 63 +++++++++++-------- .../environmentVariablesRepository.server.ts | 4 ++ .../core/src/v3/runMetadata/metadataStream.ts | 2 + 4 files changed, 45 insertions(+), 25 deletions(-) diff --git a/apps/webapp/app/env.server.ts b/apps/webapp/app/env.server.ts index dca87c9299..ee2e2713d2 100644 --- a/apps/webapp/app/env.server.ts +++ b/apps/webapp/app/env.server.ts @@ -32,6 +32,7 @@ const EnvironmentSchema = z.object({ LOGIN_ORIGIN: z.string().default("http://localhost:3030"), APP_ORIGIN: z.string().default("http://localhost:3030"), API_ORIGIN: z.string().optional(), + STREAM_ORIGIN: z.string().optional(), ELECTRIC_ORIGIN: z.string().default("http://localhost:3060"), APP_ENV: z.string().default(process.env.NODE_ENV), SERVICE_NAME: z.string().default("trigger.dev webapp"), diff --git a/apps/webapp/app/services/realtimeStreams.server.ts b/apps/webapp/app/services/realtimeStreams.server.ts index 6ff6bbfbd7..7b3eff9a0e 100644 --- a/apps/webapp/app/services/realtimeStreams.server.ts +++ b/apps/webapp/app/services/realtimeStreams.server.ts @@ -1,4 +1,4 @@ -import Redis, { RedisOptions } from "ioredis"; +import Redis, { RedisKey, RedisOptions, RedisValue } from "ioredis"; import { logger } from "./logger.server"; export type RealtimeStreamsOptions = { @@ -107,55 +107,68 @@ export class RealtimeStreams { streamId: string ): Promise { const redis = new Redis(this.options.redis ?? 
{}); - const streamKey = `stream:${runId}:${streamId}`; - async function cleanup(stream?: TransformStream) { + async function cleanup() { try { await redis.quit(); - if (stream) { - const writer = stream.writable.getWriter(); - await writer.close(); // Catch in case the stream is already closed - } } catch (error) { logger.error("[RealtimeStreams][ingestData] Error in cleanup:", { error }); } } try { - const reader = stream.getReader(); - const decoder = new TextDecoder(); - let buffer = ""; + // Use TextDecoderStream to simplify text decoding + const textStream = stream.pipeThrough(new TextDecoderStream()); + const reader = textStream.getReader(); + + const batchSize = 10; // Adjust this value based on performance testing + let batchCommands: Array<[key: RedisKey, ...args: RedisValue[]]> = []; while (true) { const { done, value } = await reader.read(); - logger.debug("[RealtimeStreams][ingestData] Reading data", { streamKey, done }); - if (done) { - if (buffer) { - const data = JSON.parse(buffer); - await redis.xadd(streamKey, "*", "data", JSON.stringify(data)); - } break; } - buffer += decoder.decode(value, { stream: true }); - const lines = buffer.split("\n"); - buffer = lines.pop() || ""; + logger.debug("[RealtimeStreams][ingestData] Reading data", { streamKey, value }); + + // 'value' is a string containing the decoded text + const lines = value.split("\n"); for (const line of lines) { if (line.trim()) { - const data = JSON.parse(line); - - logger.debug("[RealtimeStreams][ingestData] Ingesting data", { streamKey }); - - await redis.xadd(streamKey, "*", "data", JSON.stringify(data)); + // Avoid unnecessary parsing; assume 'line' is already a JSON string + // Add XADD command with MAXLEN option to limit stream size + batchCommands.push([streamKey, "MAXLEN", "~", "1000", "*", "data", line]); + + if (batchCommands.length >= batchSize) { + // Send batch using a pipeline + const pipeline = redis.pipeline(); + for (const args of batchCommands) { + pipeline.xadd(...args); + } + await pipeline.exec(); + batchCommands = []; + } } } } - await redis.xadd(streamKey, "*", "data", JSON.stringify({ __end: true })); + // Send any remaining commands + if (batchCommands.length > 0) { + const pipeline = redis.pipeline(); + for (const args of batchCommands) { + pipeline.xadd(...args); + } + await pipeline.exec(); + } + + // Send the __end message to indicate the end of the stream + const endData = JSON.stringify({ __end: true }); + await redis.xadd(streamKey, "MAXLEN", "~", "1000", "*", "data", endData); + return new Response(null, { status: 200 }); } catch (error) { console.error("Error in ingestData:", error); diff --git a/apps/webapp/app/v3/environmentVariables/environmentVariablesRepository.server.ts b/apps/webapp/app/v3/environmentVariables/environmentVariablesRepository.server.ts index 99488dd754..876abb3e16 100644 --- a/apps/webapp/app/v3/environmentVariables/environmentVariablesRepository.server.ts +++ b/apps/webapp/app/v3/environmentVariables/environmentVariablesRepository.server.ts @@ -734,6 +734,10 @@ async function resolveBuiltInProdVariables(runtimeEnvironment: RuntimeEnvironmen key: "TRIGGER_API_URL", value: env.API_ORIGIN ?? env.APP_ORIGIN, }, + { + key: "TRIGGER_STREAM_URL", + value: env.STREAM_ORIGIN ?? env.API_ORIGIN ?? 
env.APP_ORIGIN, + }, { key: "TRIGGER_RUNTIME_WAIT_THRESHOLD_IN_MS", value: String(env.CHECKPOINT_THRESHOLD_IN_MS), diff --git a/packages/core/src/v3/runMetadata/metadataStream.ts b/packages/core/src/v3/runMetadata/metadataStream.ts index d69fd22e2f..62f8c4c56f 100644 --- a/packages/core/src/v3/runMetadata/metadataStream.ts +++ b/packages/core/src/v3/runMetadata/metadataStream.ts @@ -1,3 +1,5 @@ +import { run } from "node:test"; + export type MetadataOptions = { baseUrl: string; runId: string; From f714893dfa4c61920404d687a3bd965c4fb12143 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Fri, 1 Nov 2024 21:12:50 +0000 Subject: [PATCH 04/31] =?UTF-8?q?=F0=9F=94=A7=20chore:=20add=20dev=20depen?= =?UTF-8?q?dencies=20for=20bundle=20analysis?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- pnpm-lock.yaml | 97 ++++++++++++++++++++++ references/nextjs-realtime/next.config.mjs | 7 +- references/nextjs-realtime/package.json | 1 + 3 files changed, 104 insertions(+), 1 deletion(-) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 7f3712c31c..e844c29aef 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1598,6 +1598,9 @@ importers: specifier: 3.22.3 version: 3.22.3 devDependencies: + '@next/bundle-analyzer': + specifier: ^15.0.2 + version: 15.0.2 '@types/node': specifier: ^20 version: 20.14.14 @@ -4770,6 +4773,11 @@ packages: '@connectrpc/connect-node': 1.4.0(@bufbuild/protobuf@1.10.0)(@connectrpc/connect@1.4.0) dev: false + /@discoveryjs/json-ext@0.5.7: + resolution: {integrity: sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw==} + engines: {node: '>=10.0.0'} + dev: true + /@effect/platform@0.63.2(@effect/schema@0.72.2)(effect@3.7.2): resolution: {integrity: sha512-b39pVFw0NGo/tXjGShW7Yg0M+kG7bRrFR6+dQ3aIu99ePTkTp6bGb/kDB7n+dXsFFdIqHsQGYESeYcOQngxdFQ==} peerDependencies: @@ -7008,6 +7016,15 @@ packages: strict-event-emitter: 0.5.1 dev: false + /@next/bundle-analyzer@15.0.2: + resolution: {integrity: sha512-bV566k+rDsaqXSUgHBof0iMIDx5DWtLx/98jvYtqb9x85e+WJzv+8cpDvbjtxQMf7nFC/LUkPmpruj1cOKfz4A==} + dependencies: + webpack-bundle-analyzer: 4.10.1 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + dev: true + /@next/env@14.1.0: resolution: {integrity: sha512-Py8zIo+02ht82brwwhTg36iogzFqGLPXlRGKQw5s+qP/kMNc4MAyDeEwBKDijk6zTIbegEgu8Qy7C1LboslQAw==} dev: false @@ -8266,6 +8283,10 @@ packages: optionalDependencies: fsevents: 2.3.2 + /@polka/url@1.0.0-next.28: + resolution: {integrity: sha512-8LduaNlMZGwdZ6qWrKlfa+2M4gahzFkprZiAt2TF8uS0qQgBizKXpXURqvTJ4WtmupWxaLqjRb2UCTe72mu+Aw==} + dev: true + /@popperjs/core@2.11.8: resolution: {integrity: sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A==} dev: false @@ -17847,6 +17868,11 @@ packages: engines: {node: '>= 6'} dev: false + /commander@7.2.0: + resolution: {integrity: sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==} + engines: {node: '>= 10'} + dev: true + /commander@9.5.0: resolution: {integrity: sha512-KRs7WVDKg86PWiuAqhDrAQnTXZKraVcCc6vFdL14qrZ/DcWwuRo7VoiYXalXO7S5GKpqYiVEwCbgFDfxNHKJBQ==} engines: {node: ^12.20.0 || >=14} @@ -18446,6 +18472,10 @@ packages: resolution: {integrity: sha512-oaMBel6gjolK862uaPQOVTA7q3TZhuSvuMQAAglQDOWYO9A91IrAOUJEyKVlqJlHE0vq5p5UXxzdPfMH/x6xNg==} dev: false + /debounce@1.2.1: + resolution: {integrity: sha512-XRRe6Glud4rd/ZGQfiV1ruXSfbvfJedlV9Y6zOlP+2K04vBYiJEte6stfFkCP03aMnY5tsipamumUjL14fofug==} + dev: true + /debounce@2.0.0: 
resolution: {integrity: sha512-xRetU6gL1VJbs85Mc4FoEGSjQxzpdxRyFhe3lmWFyy2EzydIcD4xzUvRJMD+NPDfMwKNhxa3PvsIOU32luIWeA==} engines: {node: '>=18'} @@ -18860,6 +18890,10 @@ packages: resolution: {integrity: sha512-1A8za6ws41LQgv9HrE/66jyC5yuSjQ3L/KOpFtoBilsAK2iA2wuS5rTt1OCzIvtS2V7nVmedsUU+DGRcjBmOYA==} dev: true + /duplexer@0.1.2: + resolution: {integrity: sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==} + dev: true + /duplexify@3.7.1: resolution: {integrity: sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g==} dependencies: @@ -21268,6 +21302,13 @@ packages: through2: 2.0.5 dev: true + /gzip-size@6.0.0: + resolution: {integrity: sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q==} + engines: {node: '>=10'} + dependencies: + duplexer: 0.1.2 + dev: true + /har-schema@2.0.0: resolution: {integrity: sha512-Oqluz6zhGX8cyRaTQlFMPw80bSJVG2x/cFb8ZPhUILGgHka9SsokCCOQgpveePerqidZOrT14ipqfJb7ILcW5Q==} engines: {node: '>=4'} @@ -21411,6 +21452,10 @@ packages: lru-cache: 7.18.3 dev: true + /html-escaper@2.0.2: + resolution: {integrity: sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==} + dev: true + /html-tags@3.3.1: resolution: {integrity: sha512-ztqyC3kLto0e9WbNp0aeP+M3kTt+nbaIveGmUxAtZa+8iFgKLUOD4YKM5j+f3QD89bra7UeumolZHKuOXnTmeQ==} engines: {node: '>=8'} @@ -21995,6 +22040,11 @@ packages: engines: {node: '>=12'} dev: true + /is-plain-object@5.0.0: + resolution: {integrity: sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==} + engines: {node: '>=0.10.0'} + dev: true + /is-reference@3.0.1: resolution: {integrity: sha512-baJJdQLiYaJdvFbJqXrcGv3WU3QCzBlUcI5QhbesIm6/xPsvmO+2CDoi/GMOFBQEQm+PXkwOPrp9KK5ozZsp2w==} dependencies: @@ -23559,6 +23609,11 @@ packages: resolution: {integrity: sha512-hzzEagAgDyoU1Q6yg5uI+AorQgdvMCur3FcKf7NhMKWsaYg+RnbTyHRa/9IlLF9rf455MOCtcqqrQQ83pPP7Uw==} engines: {node: '>=10'} + /mrmime@2.0.0: + resolution: {integrity: sha512-eu38+hdgojoyq63s+yTpN4XMBdt5l8HhMhc4VKLO9KM5caLIBvUm4thi7fFaxyTmCKeNnXZ5pAlBwCUnhA09uw==} + engines: {node: '>=10'} + dev: true + /ms@2.0.0: resolution: {integrity: sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==} @@ -24326,6 +24381,11 @@ packages: - encoding dev: false + /opener@1.5.2: + resolution: {integrity: sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A==} + hasBin: true + dev: true + /openid-client@5.6.4: resolution: {integrity: sha512-T1h3B10BRPKfcObdBklX639tVz+xh34O7GjofqrqiAQdm7eHsQ00ih18x6wuJ/E6FxdtS2u3FmUGPDeEcMwzNA==} requiresBuild: true @@ -27364,6 +27424,15 @@ packages: resolution: {integrity: sha512-bBAoTn75tuKh83opmZ1VoyVoQIsvLCKzSxuasAxbnKofrT8eGyOEIaXSuNfhi/hI160+fwsR7ObcbBpOyzDvXg==} dev: false + /sirv@2.0.4: + resolution: {integrity: sha512-94Bdh3cC2PKrbgSOUqTiGPWVZeSiXfKOVZNJniWoqrWrRkB1CJzBU3NEbiTsPcYy1lDsANA/THzS+9WBiy5nfQ==} + engines: {node: '>= 10'} + dependencies: + '@polka/url': 1.0.0-next.28 + mrmime: 2.0.0 + totalist: 3.0.1 + dev: true + /sisteransi@1.0.5: resolution: {integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==} @@ -28714,6 +28783,11 @@ packages: resolution: {integrity: sha512-0a5EOkAUp8D4moMi2W8ZF8jcga7BgZd91O/yabJCFY8az+XSzeGyTKs0Aoo897iV1Nj6guFq8orWDS96z91oGg==} dev: false + /totalist@3.0.1: + resolution: {integrity: 
sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==} + engines: {node: '>=6'} + dev: true + /tough-cookie@2.5.0: resolution: {integrity: sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==} engines: {node: '>=0.8'} @@ -30465,6 +30539,29 @@ packages: /webidl-conversions@3.0.1: resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} + /webpack-bundle-analyzer@4.10.1: + resolution: {integrity: sha512-s3P7pgexgT/HTUSYgxJyn28A+99mmLq4HsJepMPzu0R8ImJc52QNqaFYW1Z2z2uIb1/J3eYgaAWVpaC+v/1aAQ==} + engines: {node: '>= 10.13.0'} + hasBin: true + dependencies: + '@discoveryjs/json-ext': 0.5.7 + acorn: 8.12.1 + acorn-walk: 8.3.2 + commander: 7.2.0 + debounce: 1.2.1 + escape-string-regexp: 4.0.0 + gzip-size: 6.0.0 + html-escaper: 2.0.2 + is-plain-object: 5.0.0 + opener: 1.5.2 + picocolors: 1.0.1 + sirv: 2.0.4 + ws: 7.5.9 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + dev: true + /webpack-sources@3.2.3: resolution: {integrity: sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==} engines: {node: '>=10.13.0'} diff --git a/references/nextjs-realtime/next.config.mjs b/references/nextjs-realtime/next.config.mjs index 1df123ef7b..e6d93dc133 100644 --- a/references/nextjs-realtime/next.config.mjs +++ b/references/nextjs-realtime/next.config.mjs @@ -1,4 +1,6 @@ /** @type {import('next').NextConfig} */ +import NextBundleAnalyzer from "@next/bundle-analyzer"; + const nextConfig = { images: { remotePatterns: [ @@ -27,4 +29,7 @@ const nextConfig = { }, }; -export default nextConfig; +export default NextBundleAnalyzer({ + enabled: process.env.ANALYZE === "true", + openAnalyzer: true, +})(nextConfig); diff --git a/references/nextjs-realtime/package.json b/references/nextjs-realtime/package.json index 3d60cfcaa9..40b8e7da35 100644 --- a/references/nextjs-realtime/package.json +++ b/references/nextjs-realtime/package.json @@ -30,6 +30,7 @@ "zod": "3.22.3" }, "devDependencies": { + "@next/bundle-analyzer": "^15.0.2", "@types/node": "^20", "@types/react": "^18", "@types/react-dom": "^18", From ea84688e537a9ea891892002a995b4a3c69462b7 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Mon, 4 Nov 2024 13:03:40 +0000 Subject: [PATCH 05/31] add metadata tests and a few more utilties --- .../src/entryPoints/deploy-run-worker.ts | 4 + .../cli-v3/src/entryPoints/dev-run-worker.ts | 4 + packages/core/package.json | 1 + packages/core/src/v3/runMetadata/index.ts | 12 + packages/core/src/v3/runMetadata/manager.ts | 109 +++++++-- .../core/src/v3/runMetadata/metadataStream.ts | 6 +- .../core/src/v3/runMetadata/noopManager.ts | 9 + packages/core/src/v3/runMetadata/types.ts | 3 + .../core/test/standardMetadataManager.test.ts | 207 ++++++++++++++++++ packages/trigger-sdk/src/v3/metadata.ts | 15 ++ pnpm-lock.yaml | 3 + 11 files changed, 348 insertions(+), 25 deletions(-) create mode 100644 packages/core/test/standardMetadataManager.test.ts diff --git a/packages/cli-v3/src/entryPoints/deploy-run-worker.ts b/packages/cli-v3/src/entryPoints/deploy-run-worker.ts index 2a35f6c0eb..ef87e76a05 100644 --- a/packages/cli-v3/src/entryPoints/deploy-run-worker.ts +++ b/packages/cli-v3/src/entryPoints/deploy-run-worker.ts @@ -16,6 +16,7 @@ import { timeout, runMetadata, waitUntil, + apiClientManager, } from "@trigger.dev/core/v3"; import { TriggerTracer } from "@trigger.dev/core/v3/tracer"; import { ProdRuntimeManager } from 
"@trigger.dev/core/v3/prod"; @@ -103,6 +104,7 @@ taskCatalog.setGlobalTaskCatalog(new StandardTaskCatalog()); const durableClock = new DurableClock(); clock.setGlobalClock(durableClock); const runMetadataManager = new StandardMetadataManager( + apiClientManager.clientOrThrow(), getEnvVar("TRIGGER_STREAM_URL", getEnvVar("TRIGGER_API_URL")) ?? "https://api.trigger.dev" ); runMetadata.setGlobalManager(runMetadataManager); @@ -319,6 +321,8 @@ const zodIpc = new ZodIpcConnection({ _execution = execution; _isRunning = true; + runMetadataManager.runId = execution.run.id; + runMetadataManager.startPeriodicFlush( getNumberEnvVar("TRIGGER_RUN_METADATA_FLUSH_INTERVAL", 1000) ); diff --git a/packages/cli-v3/src/entryPoints/dev-run-worker.ts b/packages/cli-v3/src/entryPoints/dev-run-worker.ts index b3a9d81486..d4a1c05a56 100644 --- a/packages/cli-v3/src/entryPoints/dev-run-worker.ts +++ b/packages/cli-v3/src/entryPoints/dev-run-worker.ts @@ -16,6 +16,7 @@ import { timeout, runMetadata, waitUntil, + apiClientManager, } from "@trigger.dev/core/v3"; import { TriggerTracer } from "@trigger.dev/core/v3/tracer"; import { DevRuntimeManager } from "@trigger.dev/core/v3/dev"; @@ -85,6 +86,7 @@ const devRuntimeManager = new DevRuntimeManager(); runtime.setGlobalRuntimeManager(devRuntimeManager); timeout.setGlobalManager(new UsageTimeoutManager(devUsageManager)); const runMetadataManager = new StandardMetadataManager( + apiClientManager.clientOrThrow(), getEnvVar("TRIGGER_STREAM_URL", getEnvVar("TRIGGER_API_URL")) ?? "https://api.trigger.dev" ); runMetadata.setGlobalManager(runMetadataManager); @@ -289,6 +291,8 @@ const zodIpc = new ZodIpcConnection({ _execution = execution; _isRunning = true; + runMetadataManager.runId = execution.run.id; + runMetadataManager.startPeriodicFlush( getNumberEnvVar("TRIGGER_RUN_METADATA_FLUSH_INTERVAL", 1000) ); diff --git a/packages/core/package.json b/packages/core/package.json index 8ee1e52001..ea43dc5844 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -209,6 +209,7 @@ }, "devDependencies": { "@arethetypeswrong/cli": "^0.15.4", + "@epic-web/test-server": "^0.1.0", "@types/humanize-duration": "^3.27.1", "@types/node": "20.14.14", "@types/readable-stream": "^4.0.14", diff --git a/packages/core/src/v3/runMetadata/index.ts b/packages/core/src/v3/runMetadata/index.ts index df22321dd1..bc7a766fc1 100644 --- a/packages/core/src/v3/runMetadata/index.ts +++ b/packages/core/src/v3/runMetadata/index.ts @@ -49,6 +49,18 @@ export class RunMetadataAPI implements RunMetadataManager { return this.#getManager().deleteKey(key); } + public incrementKey(key: string, value: number): void { + return this.#getManager().incrementKey(key, value); + } + + decrementKey(key: string, value: number): void { + return this.#getManager().decrementKey(key, value); + } + + appendKey(key: string, value: DeserializedJson): void { + return this.#getManager().appendKey(key, value); + } + public update(metadata: Record): void { return this.#getManager().update(metadata); } diff --git a/packages/core/src/v3/runMetadata/manager.ts b/packages/core/src/v3/runMetadata/manager.ts index 1a8e51a7af..f4280ef916 100644 --- a/packages/core/src/v3/runMetadata/manager.ts +++ b/packages/core/src/v3/runMetadata/manager.ts @@ -1,11 +1,10 @@ import { JSONHeroPath } from "@jsonhero/path"; import { dequal } from "dequal/lite"; import { DeserializedJson } from "../../schemas/json.js"; -import { apiClientManager } from "../apiClientManager-api.js"; -import { taskContext } from "../task-context-api.js"; import { 
ApiRequestOptions } from "../zodfetch.js"; import { RunMetadataManager } from "./types.js"; import { MetadataStream } from "./metadataStream.js"; +import { ApiClient } from "../apiClient/index.js"; export class StandardMetadataManager implements RunMetadataManager { private flushTimeoutId: NodeJS.Timeout | null = null; @@ -14,7 +13,12 @@ export class StandardMetadataManager implements RunMetadataManager { // Add a Map to track active streams private activeStreams = new Map>(); - constructor(private streamsBaseUrl: string) {} + public runId: string | undefined; + + constructor( + private apiClient: ApiClient, + private streamsBaseUrl: string + ) {} public enterWithMetadata(metadata: Record): void { this.store = metadata ?? {}; @@ -29,9 +33,7 @@ export class StandardMetadataManager implements RunMetadataManager { } public setKey(key: string, value: DeserializedJson) { - const runId = taskContext.ctx?.run.id; - - if (!runId) { + if (!this.runId) { return; } @@ -61,9 +63,7 @@ export class StandardMetadataManager implements RunMetadataManager { } public deleteKey(key: string) { - const runId = taskContext.ctx?.run.id; - - if (!runId) { + if (!this.runId) { return; } @@ -77,10 +77,81 @@ export class StandardMetadataManager implements RunMetadataManager { this.store = nextStore; } - public update(metadata: Record): void { - const runId = taskContext.ctx?.run.id; + public appendKey(key: string, value: DeserializedJson) { + if (!this.runId) { + return; + } + + let nextStore: Record | undefined = this.store + ? structuredClone(this.store) + : {}; + + if (key.startsWith("$.")) { + const path = new JSONHeroPath(key); + const currentValue = path.first(nextStore); + + if (currentValue === undefined) { + // Initialize as array with single item + path.set(nextStore, [value]); + } else if (Array.isArray(currentValue)) { + // Append to existing array + path.set(nextStore, [...currentValue, value]); + } else { + // Convert to array if not already + path.set(nextStore, [currentValue, value]); + } + } else { + const currentValue = nextStore[key]; + + if (currentValue === undefined) { + // Initialize as array with single item + nextStore[key] = [value]; + } else if (Array.isArray(currentValue)) { + // Append to existing array + nextStore[key] = [...currentValue, value]; + } else { + // Convert to array if not already + nextStore[key] = [currentValue, value]; + } + } - if (!runId) { + if (!dequal(this.store, nextStore)) { + this.hasChanges = true; + } + + this.store = nextStore; + } + + public incrementKey(key: string, increment: number = 1) { + if (!this.runId) { + return; + } + + let nextStore = this.store ? structuredClone(this.store) : {}; + let currentValue = key.startsWith("$.") + ? new JSONHeroPath(key).first(nextStore) + : nextStore[key]; + + const newValue = (typeof currentValue === "number" ? 
currentValue : 0) + increment; + + if (key.startsWith("$.")) { + new JSONHeroPath(key).set(nextStore, newValue); + } else { + nextStore[key] = newValue; + } + + if (!dequal(this.store, nextStore)) { + this.hasChanges = true; + this.store = nextStore; + } + } + + public decrementKey(key: string, decrement: number = 1) { + this.incrementKey(key, -decrement); + } + + public update(metadata: Record): void { + if (!this.runId) { return; } @@ -96,9 +167,7 @@ export class StandardMetadataManager implements RunMetadataManager { value: AsyncIterable, signal?: AbortSignal ): Promise> { - const runId = taskContext.ctx?.run.id; - - if (!runId) { + if (!this.runId) { return value; } @@ -109,7 +178,7 @@ export class StandardMetadataManager implements RunMetadataManager { const streamInstance = new MetadataStream({ key, - runId, + runId: this.runId, iterator: value[Symbol.asyncIterator](), baseUrl: this.streamsBaseUrl, signal, @@ -153,9 +222,7 @@ export class StandardMetadataManager implements RunMetadataManager { } public async flush(requestOptions?: ApiRequestOptions): Promise { - const runId = taskContext.ctx?.run.id; - - if (!runId) { + if (!this.runId) { return; } @@ -167,11 +234,9 @@ export class StandardMetadataManager implements RunMetadataManager { return; } - const apiClient = apiClientManager.clientOrThrow(); - try { this.hasChanges = false; - await apiClient.updateRunMetadata(runId, { metadata: this.store }, requestOptions); + await this.apiClient.updateRunMetadata(this.runId, { metadata: this.store }, requestOptions); } catch (error) { this.hasChanges = true; throw error; diff --git a/packages/core/src/v3/runMetadata/metadataStream.ts b/packages/core/src/v3/runMetadata/metadataStream.ts index 62f8c4c56f..247d81c4c3 100644 --- a/packages/core/src/v3/runMetadata/metadataStream.ts +++ b/packages/core/src/v3/runMetadata/metadataStream.ts @@ -63,6 +63,8 @@ export class MetadataStream { cancel: () => this.controller.abort(), }); + console.log("Posting server stream to ", this.options.baseUrl); + return fetch( `${this.options.baseUrl}/realtime/v1/streams/${this.options.runId}/${this.options.key}`, { @@ -73,9 +75,7 @@ export class MetadataStream { duplex: "half", signal: this.controller.signal, } - ).catch((error) => { - console.error("Error in stream:", error); - }); + ); } public async wait(): Promise { diff --git a/packages/core/src/v3/runMetadata/noopManager.ts b/packages/core/src/v3/runMetadata/noopManager.ts index 3eb23692d6..753dcfb7b1 100644 --- a/packages/core/src/v3/runMetadata/noopManager.ts +++ b/packages/core/src/v3/runMetadata/noopManager.ts @@ -3,6 +3,15 @@ import { ApiRequestOptions } from "../zodfetch.js"; import type { RunMetadataManager } from "./types.js"; export class NoopRunMetadataManager implements RunMetadataManager { + appendKey(key: string, value: DeserializedJson): void { + throw new Error("Method not implemented."); + } + incrementKey(key: string, value: number): void { + throw new Error("Method not implemented."); + } + decrementKey(key: string, value: number): void { + throw new Error("Method not implemented."); + } stream(key: string, value: AsyncIterable): Promise> { throw new Error("Method not implemented."); } diff --git a/packages/core/src/v3/runMetadata/types.ts b/packages/core/src/v3/runMetadata/types.ts index 3ef649df8f..6e7ece8fef 100644 --- a/packages/core/src/v3/runMetadata/types.ts +++ b/packages/core/src/v3/runMetadata/types.ts @@ -8,6 +8,9 @@ export interface RunMetadataManager { getKey(key: string): DeserializedJson | undefined; setKey(key: string, 
value: DeserializedJson): void; deleteKey(key: string): void; + appendKey(key: string, value: DeserializedJson): void; + incrementKey(key: string, value: number): void; + decrementKey(key: string, value: number): void; update(metadata: Record): void; flush(requestOptions?: ApiRequestOptions): Promise; stream(key: string, value: AsyncIterable, signal?: AbortSignal): Promise>; diff --git a/packages/core/test/standardMetadataManager.test.ts b/packages/core/test/standardMetadataManager.test.ts new file mode 100644 index 0000000000..072b6d3e35 --- /dev/null +++ b/packages/core/test/standardMetadataManager.test.ts @@ -0,0 +1,207 @@ +import { describe, test, expect, beforeEach, afterEach } from "vitest"; +import { createTestHttpServer } from "@epic-web/test-server/http"; +import { StandardMetadataManager } from "../src/v3/runMetadata/manager.js"; +import { ApiClient } from "../src/v3/apiClient/index.js"; + +describe("StandardMetadataManager", () => { + const runId = "test-run-id"; + let server: Awaited>; + let metadataUpdates: Array> = []; + let manager: StandardMetadataManager; + + beforeEach(async () => { + metadataUpdates = []; + + server = await createTestHttpServer({ + defineRoutes(router) { + router.put("/api/v1/runs/:runId/metadata", async ({ req }) => { + const body = await req.json(); + metadataUpdates.push(body); + return Response.json({ metadata: body.metadata }); + }); + }, + }); + + const apiClient = new ApiClient(server.http.url().origin, "tr-123"); + + manager = new StandardMetadataManager(apiClient, server.http.url().origin); + manager.runId = runId; + }); + + afterEach(async () => { + await server.close(); + }); + + test("should initialize with empty store", () => { + expect(manager.current()).toBeUndefined(); + }); + + test("should set and get simple keys", () => { + manager.setKey("test", "value"); + expect(manager.getKey("test")).toBe("value"); + }); + + test("should handle JSON path keys", () => { + manager.setKey("nested", { foo: "bar" }); + manager.setKey("$.nested.path", "value"); + expect(manager.current()).toEqual({ + nested: { + foo: "bar", + path: "value", + }, + }); + }); + + test("should flush changes to server", async () => { + manager.setKey("test", "value"); + await manager.flush(); + + expect(metadataUpdates).toHaveLength(1); + expect(metadataUpdates[0]).toEqual({ + metadata: { + test: "value", + }, + }); + }); + + test("should only flush to server when data has actually changed", async () => { + // Initial set and flush + manager.setKey("test", "value"); + await manager.flush(); + expect(metadataUpdates).toHaveLength(1); + + // Same value set again + manager.setKey("test", "value"); + await manager.flush(); + // Should not trigger another update since value hasn't changed + expect(metadataUpdates).toHaveLength(1); + + // Different value set + manager.setKey("test", "new value"); + await manager.flush(); + // Should trigger new update + expect(metadataUpdates).toHaveLength(2); + }); + + test("should only flush to server when nested data has actually changed", async () => { + // Initial nested object + manager.setKey("nested", { foo: "bar" }); + await manager.flush(); + expect(metadataUpdates).toHaveLength(1); + + // Same nested value + manager.setKey("nested", { foo: "bar" }); + await manager.flush(); + // Should not trigger another update + expect(metadataUpdates).toHaveLength(1); + + // Different nested value + manager.setKey("nested", { foo: "baz" }); + await manager.flush(); + // Should trigger new update + expect(metadataUpdates).toHaveLength(2); + }); + + 
test("should append to list with simple key", () => { + // First append creates the array + manager.appendKey("myList", "first"); + expect(manager.getKey("myList")).toEqual(["first"]); + + // Second append adds to existing array + manager.appendKey("myList", "second"); + expect(manager.getKey("myList")).toEqual(["first", "second"]); + }); + + test("should append to list with JSON path", () => { + // First create nested structure + manager.setKey("nested", { items: [] }); + + // Append to empty array + manager.appendKey("$.nested.items", "first"); + expect(manager.current()).toEqual({ + nested: { + items: ["first"], + }, + }); + + // Append another item + manager.appendKey("$.nested.items", "second"); + expect(manager.current()).toEqual({ + nested: { + items: ["first", "second"], + }, + }); + }); + + test("should convert non-array values to array when appending", () => { + // Set initial non-array value + manager.setKey("value", "initial"); + + // Append should convert to array + manager.appendKey("value", "second"); + expect(manager.getKey("value")).toEqual(["initial", "second"]); + }); + + test("should convert non-array values to array when appending with JSON path", () => { + // Set initial nested non-array value + manager.setKey("nested", { value: "initial" }); + + // Append should convert to array + manager.appendKey("$.nested.value", "second"); + expect(manager.current()).toEqual({ + nested: { + value: ["initial", "second"], + }, + }); + }); + + test("should trigger server update when appending to list", async () => { + manager.appendKey("myList", "first"); + await manager.flush(); + + expect(metadataUpdates).toHaveLength(1); + expect(metadataUpdates[0]).toEqual({ + metadata: { + myList: ["first"], + }, + }); + + manager.appendKey("myList", "second"); + await manager.flush(); + + expect(metadataUpdates).toHaveLength(2); + expect(metadataUpdates[1]).toEqual({ + metadata: { + myList: ["first", "second"], + }, + }); + }); + + test("should not trigger server update when appending same value", async () => { + manager.appendKey("myList", "first"); + await manager.flush(); + + expect(metadataUpdates).toHaveLength(1); + + // Append same value + manager.appendKey("myList", "first"); + await manager.flush(); + + // Should still be only one update + expect(metadataUpdates).toHaveLength(2); + }); + + test("should increment and decrement keys", () => { + manager.incrementKey("counter"); + expect(manager.getKey("counter")).toBe(1); + + manager.incrementKey("counter", 5); + expect(manager.getKey("counter")).toBe(6); + + manager.decrementKey("counter"); + expect(manager.getKey("counter")).toBe(5); + + manager.decrementKey("counter", 3); + expect(manager.getKey("counter")).toBe(2); + }); +}); diff --git a/packages/trigger-sdk/src/v3/metadata.ts b/packages/trigger-sdk/src/v3/metadata.ts index 4ca12b0200..334dafc6bb 100644 --- a/packages/trigger-sdk/src/v3/metadata.ts +++ b/packages/trigger-sdk/src/v3/metadata.ts @@ -26,6 +26,9 @@ export const metadata = { replace: replaceMetadata, flush: flushMetadata, stream: stream, + append: appendMetadataKey, + increment: incrementMetadataKey, + decrement: decrementMetadataKey, }; export type RunMetadata = Record; @@ -106,6 +109,18 @@ function saveMetadata(metadata: RunMetadata): void { runMetadata.update(metadata); } +function incrementMetadataKey(key: string, value: number) { + runMetadata.incrementKey(key, value); +} + +function decrementMetadataKey(key: string, value: number) { + runMetadata.decrementKey(key, value); +} + +function appendMetadataKey(key: 
string, value: DeserializedJson) { + runMetadata.appendKey(key, value); +} + /** * Flushes metadata to the Trigger.dev instance * diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index e844c29aef..e648760986 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1338,6 +1338,9 @@ importers: '@arethetypeswrong/cli': specifier: ^0.15.4 version: 0.15.4 + '@epic-web/test-server': + specifier: ^0.1.0 + version: 0.1.0 '@types/humanize-duration': specifier: ^3.27.1 version: 3.27.1 From c784427ba4a4339ddcd8228823810ee06b766028 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Mon, 11 Nov 2024 15:25:02 +0000 Subject: [PATCH 06/31] Add stream tests and improve streaming --- .vscode/launch.json | 9 ++ .../app/services/realtimeStreams.server.ts | 138 +++++++++--------- apps/webapp/test/realtimeStreams.test.ts | 106 ++++++++++++++ apps/webapp/test/utils/streams.ts | 46 ++++++ packages/core/package.json | 1 + packages/core/src/v3/apiClient/runStream.ts | 52 +++---- packages/core/test/runStream.test.ts | 6 +- pnpm-lock.yaml | 8 + 8 files changed, 256 insertions(+), 110 deletions(-) create mode 100644 apps/webapp/test/realtimeStreams.test.ts create mode 100644 apps/webapp/test/utils/streams.ts diff --git a/.vscode/launch.json b/.vscode/launch.json index d70f6bdd98..07f723b6b7 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -13,6 +13,15 @@ "cwd": "${workspaceFolder}", "sourceMaps": true }, + { + "type": "node-terminal", + "request": "launch", + "name": "Debug realtimeStreams.test.ts", + "command": "pnpm run test -t RealtimeStreams", + "envFile": "${workspaceFolder}/.env", + "cwd": "${workspaceFolder}/apps/webapp", + "sourceMaps": true + }, { "type": "chrome", "request": "launch", diff --git a/apps/webapp/app/services/realtimeStreams.server.ts b/apps/webapp/app/services/realtimeStreams.server.ts index 7b3eff9a0e..c1b64ea314 100644 --- a/apps/webapp/app/services/realtimeStreams.server.ts +++ b/apps/webapp/app/services/realtimeStreams.server.ts @@ -5,100 +5,93 @@ export type RealtimeStreamsOptions = { redis: RedisOptions | undefined; }; +const END_SENTINEL = "<>"; + export class RealtimeStreams { constructor(private options: RealtimeStreamsOptions) {} async streamResponse(runId: string, streamId: string, signal: AbortSignal): Promise { const redis = new Redis(this.options.redis ?? 
{}); const streamKey = `stream:${runId}:${streamId}`; + let isCleanedUp = false; + + const stream = new ReadableStream({ + start: async (controller) => { + let lastId = "0"; + let retryCount = 0; + const maxRetries = 3; - const stream = new TransformStream({ - transform(chunk: string, controller) { try { - const data = JSON.parse(chunk); + while (!signal.aborted) { + try { + const messages = await redis.xread( + "COUNT", + 100, + "BLOCK", + 5000, + "STREAMS", + streamKey, + lastId + ); + + retryCount = 0; + + if (messages && messages.length > 0) { + const [_key, entries] = messages[0]; + + for (const [id, fields] of entries) { + lastId = id; + + if (fields && fields.length >= 2) { + if (fields[1] === END_SENTINEL) { + controller.close(); + return; + } + controller.enqueue(`data: ${fields[1]}\n\n`); + + if (signal.aborted) { + controller.close(); + return; + } + } + } + } + } catch (error) { + if (signal.aborted) break; - if (typeof data === "object" && data !== null && "__end" in data && data.__end === true) { - controller.terminate(); - return; + console.error("Error reading from Redis stream:", error); + retryCount++; + if (retryCount >= maxRetries) throw error; + await new Promise((resolve) => setTimeout(resolve, 1000 * retryCount)); + } } - controller.enqueue(`data: ${chunk}\n\n`); } catch (error) { - console.error("Invalid JSON in stream:", error); + console.error("Fatal error in stream processing:", error); + controller.error(error); + } finally { + await cleanup(); } }, - }); - - const response = new Response(stream.readable, { - headers: { - "Content-Type": "text/event-stream", - "Cache-Control": "no-cache", - Connection: "keep-alive", + cancel: async () => { + await cleanup(); }, }); - let isCleanedUp = false; - async function cleanup() { if (isCleanedUp) return; isCleanedUp = true; - await redis.quit(); - const writer = stream.writable.getWriter(); - if (writer) await writer.close().catch(() => {}); // Ensure close doesn't error if already closed + await redis.quit().catch(console.error); } signal.addEventListener("abort", cleanup); - (async () => { - let lastId = "0"; - let retryCount = 0; - const maxRetries = 3; - - try { - while (!signal.aborted) { - try { - const messages = await redis.xread( - "COUNT", - 100, - "BLOCK", - 5000, - "STREAMS", - streamKey, - lastId - ); - - retryCount = 0; - - if (messages && messages.length > 0) { - const [_key, entries] = messages[0]; - - for (const [id, fields] of entries) { - lastId = id; - - if (fields && fields.length >= 2 && !stream.writable.locked) { - const writer = stream.writable.getWriter(); - try { - await writer.write(fields[1]); - } finally { - writer.releaseLock(); - } - } - } - } - } catch (error) { - console.error("Error reading from Redis stream:", error); - retryCount++; - if (retryCount >= maxRetries) throw error; - await new Promise((resolve) => setTimeout(resolve, 1000 * retryCount)); - } - } - } catch (error) { - console.error("Fatal error in stream processing:", error); - } finally { - await cleanup(); - } - })(); - - return response; + return new Response(stream.pipeThrough(new TextEncoderStream()), { + headers: { + "Content-Type": "text/event-stream", + "Cache-Control": "no-cache", + Connection: "keep-alive", + }, + }); } async ingestData( @@ -166,8 +159,7 @@ export class RealtimeStreams { } // Send the __end message to indicate the end of the stream - const endData = JSON.stringify({ __end: true }); - await redis.xadd(streamKey, "MAXLEN", "~", "1000", "*", "data", endData); + await redis.xadd(streamKey, "MAXLEN", 
"~", "1000", "*", "data", END_SENTINEL); return new Response(null, { status: 200 }); } catch (error) { diff --git a/apps/webapp/test/realtimeStreams.test.ts b/apps/webapp/test/realtimeStreams.test.ts new file mode 100644 index 0000000000..dd60297f22 --- /dev/null +++ b/apps/webapp/test/realtimeStreams.test.ts @@ -0,0 +1,106 @@ +import { redisTest } from "@internal/testcontainers"; +import { describe, expect, vi } from "vitest"; +import { RealtimeStreams } from "../app/services/realtimeStreams.server.js"; +import { convertArrayToReadableStream, convertResponseSSEStreamToArray } from "./utils/streams.js"; + +vi.setConfig({ testTimeout: 10_000 }); // 5 seconds + +// Mock the logger +vi.mock("./logger.server", () => ({ + logger: { + debug: vi.fn(), + error: vi.fn(), + }, +})); + +describe("RealtimeStreams", () => { + redisTest("should stream data from producer to consumer", async ({ redis }) => { + const streams = new RealtimeStreams({ redis: redis.options }); + const runId = "test-run"; + const streamId = "test-stream"; + + // Create a stream of test data + const stream = convertArrayToReadableStream(["chunk1", "chunk2", "chunk3"]).pipeThrough( + new TextEncoderStream() + ); + + // Start consuming the stream + const abortController = new AbortController(); + const responsePromise = streams.streamResponse(runId, streamId, abortController.signal); + + // Start ingesting data + await streams.ingestData(stream, runId, streamId); + + // Get the response and read the stream + const response = await responsePromise; + const received = await convertResponseSSEStreamToArray(response); + + expect(received).toEqual(["chunk1", "chunk2", "chunk3"]); + }); + + redisTest("should handle multiple concurrent streams", async ({ redis }) => { + const streams = new RealtimeStreams({ redis: redis.options }); + const runId = "test-run"; + + // Set up two different streams + const stream1 = convertArrayToReadableStream(["1a", "1b", "1c"]).pipeThrough( + new TextEncoderStream() + ); + const stream2 = convertArrayToReadableStream(["2a", "2b", "2c"]).pipeThrough( + new TextEncoderStream() + ); + + // Start consuming both streams + const abortController = new AbortController(); + const response1Promise = streams.streamResponse(runId, "stream1", abortController.signal); + const response2Promise = streams.streamResponse(runId, "stream2", abortController.signal); + + // Ingest data to both streams + await Promise.all([ + streams.ingestData(stream1, runId, "stream1"), + streams.ingestData(stream2, runId, "stream2"), + ]); + + // Get and verify both responses + const [response1, response2] = await Promise.all([response1Promise, response2Promise]); + const [received1, received2] = await Promise.all([ + convertResponseSSEStreamToArray(response1), + convertResponseSSEStreamToArray(response2), + ]); + + expect(received1).toEqual(["1a", "1b", "1c"]); + expect(received2).toEqual(["2a", "2b", "2c"]); + }); + + redisTest("should handle early consumer abort", async ({ redis }) => { + const streams = new RealtimeStreams({ redis: redis.options }); + const runId = "test-run"; + const streamId = "test-stream"; + + const stream = convertArrayToReadableStream(["chunk1", "chunk2", "chunk3"]).pipeThrough( + new TextEncoderStream() + ); + + // Start consuming but abort early + const abortController = new AbortController(); + const responsePromise = streams.streamResponse(runId, streamId, abortController.signal); + + // Get the response before aborting to ensure stream is properly set up + const response = await responsePromise; + + // Start 
reading the stream + const readPromise = convertResponseSSEStreamToArray(response); + + // Abort after a small delay to ensure everything is set up + await new Promise((resolve) => setTimeout(resolve, 100)); + abortController.abort(); + + // Start ingesting data after abort + await streams.ingestData(stream, runId, streamId); + + // Verify the stream was terminated + const received = await readPromise; + + expect(received).toEqual(["chunk1"]); + }); +}); diff --git a/apps/webapp/test/utils/streams.ts b/apps/webapp/test/utils/streams.ts new file mode 100644 index 0000000000..79249b4d6c --- /dev/null +++ b/apps/webapp/test/utils/streams.ts @@ -0,0 +1,46 @@ +export async function convertResponseStreamToArray(response: Response): Promise { + return convertReadableStreamToArray(response.body!.pipeThrough(new TextDecoderStream())); +} + +export async function convertResponseSSEStreamToArray(response: Response): Promise { + const parseSSEDataTransform = new TransformStream({ + async transform(chunk, controller) { + for (const line of chunk.split("\n")) { + if (line.startsWith("data:")) { + controller.enqueue(line.slice(6)); + } + } + }, + }); + + return convertReadableStreamToArray( + response.body!.pipeThrough(new TextDecoderStream()).pipeThrough(parseSSEDataTransform) + ); +} + +export async function convertReadableStreamToArray(stream: ReadableStream): Promise { + const reader = stream.getReader(); + const result: T[] = []; + + while (true) { + const { done, value } = await reader.read(); + if (done) break; + result.push(value); + } + + return result; +} + +export function convertArrayToReadableStream(values: T[]): ReadableStream { + return new ReadableStream({ + start(controller) { + try { + for (const value of values) { + controller.enqueue(value); + } + } finally { + controller.close(); + } + }, + }); +} diff --git a/packages/core/package.json b/packages/core/package.json index ea43dc5844..8680dededc 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -197,6 +197,7 @@ "@opentelemetry/sdk-trace-node": "1.25.1", "@opentelemetry/semantic-conventions": "1.25.1", "dequal": "^2.0.3", + "eventsource-parser": "^3.0.0", "execa": "^8.0.1", "humanize-duration": "^3.27.3", "jose": "^5.4.0", diff --git a/packages/core/src/v3/apiClient/runStream.ts b/packages/core/src/v3/apiClient/runStream.ts index 6769235b2e..3ecd60c64b 100644 --- a/packages/core/src/v3/apiClient/runStream.ts +++ b/packages/core/src/v3/apiClient/runStream.ts @@ -10,6 +10,7 @@ import { } from "../utils/ioSerialization.js"; import { ApiClient } from "./index.js"; import { AsyncIterableStream, createAsyncIterableStream, zodShapeStream } from "./stream.js"; +import { EventSourceParserStream } from "eventsource-parser/stream"; export type RunShape = TRunTypes extends AnyRunTypes ? 
{ @@ -121,43 +122,18 @@ export class SSEStreamSubscription implements StreamSubscription { throw new Error("No response body"); } - const reader = response.body.getReader(); - const decoder = new TextDecoder(); - let buffer = ""; + const reader = response.body + .pipeThrough(new TextDecoderStream()) + .pipeThrough(new EventSourceParserStream()) + .getReader(); - (async () => { - try { - while (true) { - const { done, value } = await reader.read(); + while (true) { + const { done, value } = await reader.read(); - if (done) break; + if (done) break; - buffer += decoder.decode(value, { stream: true }); - - const lines = buffer.split("\n"); - buffer = lines.pop() || ""; - - for (const line of lines) { - if (line.trim() && !line.startsWith(":")) { - try { - // Strip the "data: " prefix before parsing - const data = line.replace(/^data: /, ""); - const chunk = JSON.parse(data); - await onChunk(chunk); - } catch (e) { - console.error("Error parsing stream chunk:", e); - console.error("Raw line:", line); - } - } - } - } - } catch (error) { - if (error instanceof Error && error.name === "AbortError") { - return; - } - console.error("Error in stream subscription:", error); - } - })(); + await onChunk(safeParseJSON(value.data)); + } return () => reader.cancel(); } @@ -394,3 +370,11 @@ function apiStatusFromRunStatus(status: string): RunStatus { } } } + +function safeParseJSON(data: string): unknown { + try { + return JSON.parse(data); + } catch (error) { + return data; + } +} diff --git a/packages/core/test/runStream.test.ts b/packages/core/test/runStream.test.ts index 2ccbef5ab7..50b1b2bfca 100644 --- a/packages/core/test/runStream.test.ts +++ b/packages/core/test/runStream.test.ts @@ -68,7 +68,7 @@ class DelayedTestShapeProvider implements RunShapeProvider { async onShape(callback: (shape: SubscribeRunRawShape) => Promise): Promise<() => void> { // Only emit the first shape immediately if (this.shapes.length > 0) { - await callback(this.shapes[this.currentShapeIndex++]); + await callback(this.shapes[this.currentShapeIndex++]!); } // Set up an interval to emit remaining shapes @@ -77,7 +77,7 @@ class DelayedTestShapeProvider implements RunShapeProvider { clearInterval(interval); return; } - await callback(this.shapes[this.currentShapeIndex++]); + await callback(this.shapes[this.currentShapeIndex++]!); }, 100); return () => { @@ -560,7 +560,7 @@ describe("RunSubscription withStreams", () => { // Verify run updates const runUpdates = results.filter((r) => r.type === "run"); expect(runUpdates).toHaveLength(3); - expect(runUpdates[2].run.status).toBe("COMPLETED"); + expect(runUpdates[2]!.run.status).toBe("COMPLETED"); // Verify openai chunks const openaiChunks = results.filter((r) => r.type === "openai"); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index e648760986..d508b9866e 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1307,6 +1307,9 @@ importers: dequal: specifier: ^2.0.3 version: 2.0.3 + eventsource-parser: + specifier: ^3.0.0 + version: 3.0.0 execa: specifier: ^8.0.1 version: 8.0.1 @@ -20294,6 +20297,11 @@ packages: engines: {node: '>=14.18'} dev: false + /eventsource-parser@3.0.0: + resolution: {integrity: sha512-T1C0XCUimhxVQzW4zFipdx0SficT651NnkR0ZSH3yQwh+mFMdLfgjABVi4YtMTtaL4s168593DaoaRLMqryavA==} + engines: {node: '>=18.0.0'} + dev: false + /evt@2.4.13: resolution: {integrity: sha512-haTVOsmjzk+28zpzvVwan9Zw2rLQF2izgi7BKjAPRzZAfcv+8scL0TpM8MzvGNKFYHiy+Bq3r6FYIIUPl9kt3A==} dependencies: From 7114ce4776dcc7c039daf3cffb1ed187238a1826 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Tue, 
12 Nov 2024 11:59:06 +0000 Subject: [PATCH 07/31] Added AI tool tasks, descriptions to tasks --- .../services/createBackgroundWorker.server.ts | 1 + .../migration.sql | 2 + .../database/prisma/schema.prisma | 2 + packages/core/package.json | 6 +- packages/core/src/v3/schemas/resources.ts | 1 + packages/core/src/v3/schemas/schemas.ts | 1 + packages/core/src/v3/types/index.ts | 1 + packages/core/src/v3/types/tasks.ts | 45 +++- packages/core/src/v3/types/tools.ts | 36 +++ packages/trigger-sdk/package.json | 10 +- packages/trigger-sdk/src/v3/shared.ts | 41 +++- packages/trigger-sdk/src/v3/tasks.ts | 3 + pnpm-lock.yaml | 226 ++++++++++++++---- references/nextjs-realtime/package.json | 2 + references/nextjs-realtime/src/trigger/ai.ts | 107 ++++++--- 15 files changed, 394 insertions(+), 90 deletions(-) create mode 100644 internal-packages/database/prisma/migrations/20241112111156_add_description_to_background_worker_task/migration.sql create mode 100644 packages/core/src/v3/types/tools.ts diff --git a/apps/webapp/app/v3/services/createBackgroundWorker.server.ts b/apps/webapp/app/v3/services/createBackgroundWorker.server.ts index c9d19fd7f2..a314b0f401 100644 --- a/apps/webapp/app/v3/services/createBackgroundWorker.server.ts +++ b/apps/webapp/app/v3/services/createBackgroundWorker.server.ts @@ -150,6 +150,7 @@ export async function createBackgroundTasks( runtimeEnvironmentId: worker.runtimeEnvironmentId, workerId: worker.id, slug: task.id, + description: task.description, filePath: task.filePath, exportName: task.exportName, retryConfig: task.retry, diff --git a/internal-packages/database/prisma/migrations/20241112111156_add_description_to_background_worker_task/migration.sql b/internal-packages/database/prisma/migrations/20241112111156_add_description_to_background_worker_task/migration.sql new file mode 100644 index 0000000000..2958f8ebfa --- /dev/null +++ b/internal-packages/database/prisma/migrations/20241112111156_add_description_to_background_worker_task/migration.sql @@ -0,0 +1,2 @@ +-- AlterTable +ALTER TABLE "BackgroundWorkerTask" ADD COLUMN "description" TEXT; diff --git a/internal-packages/database/prisma/schema.prisma b/internal-packages/database/prisma/schema.prisma index d263409b52..ee6ea72166 100644 --- a/internal-packages/database/prisma/schema.prisma +++ b/internal-packages/database/prisma/schema.prisma @@ -1610,6 +1610,8 @@ model BackgroundWorkerTask { id String @id @default(cuid()) slug String + description String? 
+ friendlyId String @unique filePath String diff --git a/packages/core/package.json b/packages/core/package.json index 8680dededc..b236eac2ab 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -204,16 +204,18 @@ "nanoid": "^3.3.4", "socket.io-client": "4.7.5", "superjson": "^2.2.1", - "zod": "3.22.3", "zod-error": "1.5.0", - "zod-validation-error": "^1.5.0" + "zod-validation-error": "^1.5.0", + "zod": "3.22.3" }, "devDependencies": { + "@ai-sdk/provider-utils": "^1.0.22", "@arethetypeswrong/cli": "^0.15.4", "@epic-web/test-server": "^0.1.0", "@types/humanize-duration": "^3.27.1", "@types/node": "20.14.14", "@types/readable-stream": "^4.0.14", + "ai": "^3.4.33", "defu": "^6.1.4", "esbuild": "^0.23.0", "rimraf": "^3.0.2", diff --git a/packages/core/src/v3/schemas/resources.ts b/packages/core/src/v3/schemas/resources.ts index 01fda90ca0..b44efb80bb 100644 --- a/packages/core/src/v3/schemas/resources.ts +++ b/packages/core/src/v3/schemas/resources.ts @@ -4,6 +4,7 @@ import { MachineConfig } from "./common.js"; export const TaskResource = z.object({ id: z.string(), + description: z.string().optional(), filePath: z.string(), exportName: z.string(), queue: QueueOptions.optional(), diff --git a/packages/core/src/v3/schemas/schemas.ts b/packages/core/src/v3/schemas/schemas.ts index 1b804ec3c4..11b50eece7 100644 --- a/packages/core/src/v3/schemas/schemas.ts +++ b/packages/core/src/v3/schemas/schemas.ts @@ -149,6 +149,7 @@ export const ScheduleMetadata = z.object({ const taskMetadata = { id: z.string(), + description: z.string().optional(), queue: QueueOptions.optional(), retry: RetryOptions.optional(), machine: MachineConfig.optional(), diff --git a/packages/core/src/v3/types/index.ts b/packages/core/src/v3/types/index.ts index 2aebb98db2..c3be5f54d4 100644 --- a/packages/core/src/v3/types/index.ts +++ b/packages/core/src/v3/types/index.ts @@ -5,6 +5,7 @@ import { Prettify } from "./utils.js"; export * from "./utils.js"; export * from "./tasks.js"; export * from "./idempotencyKeys.js"; +export * from "./tools.js"; type ResolveEnvironmentVariablesOptions = { variables: Record | Array<{ name: string; value: string }>; diff --git a/packages/core/src/v3/types/tasks.ts b/packages/core/src/v3/types/tasks.ts index df913ed0b4..dd507508f0 100644 --- a/packages/core/src/v3/types/tasks.ts +++ b/packages/core/src/v3/types/tasks.ts @@ -1,7 +1,8 @@ +import type { Schema as AISchema } from "ai"; +import { z } from "zod"; import { SerializableJson } from "../../schemas/json.js"; +import { TriggerApiRequestOptions } from "../apiClient/index.js"; import { RunTags } from "../schemas/api.js"; -import { QueueOptions } from "../schemas/schemas.js"; -import { IdempotencyKey } from "./idempotencyKeys.js"; import { MachineCpu, MachineMemory, @@ -9,9 +10,11 @@ import { TaskMetadata, TaskRunContext, } from "../schemas/index.js"; +import { QueueOptions } from "../schemas/schemas.js"; +import { IdempotencyKey } from "./idempotencyKeys.js"; +import { AnySchemaParseFn, inferSchemaIn, inferSchemaOut, Schema } from "./schemas.js"; import { Prettify } from "./utils.js"; -import { AnySchemaParseFn, inferSchemaOut, Schema } from "./schemas.js"; -import { TriggerApiRequestOptions } from "../apiClient/index.js"; +import { inferToolParameters, ToolTaskParameters } from "./tools.js"; type RequireOne = { [X in Exclude]?: T[X]; @@ -150,6 +153,8 @@ type CommonTaskOptions< /** An id for your task. This must be unique inside your project and not change between versions. 
*/ id: TIdentifier; + description?: string; + /** The retry settings when an uncaught error is thrown. * * If omitted it will use the values in your `trigger.config.ts` file. @@ -337,6 +342,15 @@ export type TaskWithSchemaOptions< schema?: TSchema; }; +export type TaskWithToolOptions< + TIdentifier extends string, + TParameters extends ToolTaskParameters, + TOutput = unknown, + TInitOutput extends InitOutput = any, +> = CommonTaskOptions, TOutput, TInitOutput> & { + parameters: TParameters; +}; + declare const __output: unique symbol; declare const __payload: unique symbol; type BrandRun = { [__output]: O; [__payload]: P }; @@ -413,6 +427,9 @@ export interface Task * The id of the task. */ id: TIdentifier; + + description?: string; + /** * Trigger a task with the given payload, and continue without waiting for the result. If you want to wait for the result, use `triggerAndWait`. Returns the id of the triggered task run. * @param payload @@ -479,6 +496,26 @@ export interface Task batchTriggerAndWait: (items: Array>) => Promise>; } +export interface TaskWithSchema< + TIdentifier extends string, + TSchema extends TaskSchema | undefined = undefined, + TOutput = any, +> extends Task, TOutput> { + schema?: TSchema; +} + +export interface ToolTask< + TIdentifier extends string, + TParameters extends ToolTaskParameters, + TOutput = any, +> extends Task, TOutput> { + tool: { + parameters: TParameters; + description?: string; + execute: (args: inferToolParameters) => Promise; + }; +} + export type AnyTask = Task; export type TaskPayload = TTask extends Task diff --git a/packages/core/src/v3/types/tools.ts b/packages/core/src/v3/types/tools.ts new file mode 100644 index 0000000000..078c52feb6 --- /dev/null +++ b/packages/core/src/v3/types/tools.ts @@ -0,0 +1,36 @@ +import { z } from "zod"; +import type { Schema as AISchema } from "ai"; +import { Schema } from "./schemas.js"; + +export type ToolTaskParameters = z.ZodTypeAny | AISchema; + +export type inferToolParameters = + PARAMETERS extends AISchema + ? PARAMETERS["_type"] + : PARAMETERS extends z.ZodTypeAny + ? z.infer + : never; + +export function convertToolParametersToSchema( + toolParameters: TToolParameters +): Schema { + return toolParameters instanceof z.ZodSchema + ? 
toolParameters + : convertAISchemaToTaskSchema(toolParameters); +} + +function convertAISchemaToTaskSchema(schema: AISchema): Schema { + return (payload: unknown) => { + const result = schema.validate?.(payload); + + if (!result) { + throw new Error("Invalid payload"); + } + + if (!result.success) { + throw result.error; + } + + return result.value; + }; +} diff --git a/packages/trigger-sdk/package.json b/packages/trigger-sdk/package.json index 687e30c959..1f3bbc9377 100644 --- a/packages/trigger-sdk/package.json +++ b/packages/trigger-sdk/package.json @@ -57,8 +57,7 @@ "terminal-link": "^3.0.0", "ulid": "^2.3.0", "uuid": "^9.0.0", - "ws": "^8.11.0", - "zod": "3.22.3" + "ws": "^8.11.0" }, "devDependencies": { "@arethetypeswrong/cli": "^0.15.4", @@ -67,12 +66,17 @@ "@types/slug": "^5.0.3", "@types/uuid": "^9.0.0", "@types/ws": "^8.5.3", + "ai": "^3.4.33", "encoding": "^0.1.13", "rimraf": "^3.0.2", "tshy": "^3.0.2", "tsx": "4.17.0", "typed-emitter": "^2.1.0", - "typescript": "^5.5.4" + "typescript": "^5.5.4", + "zod": "3.22.3" + }, + "peerDependencies": { + "zod": "^3.0.0" }, "engines": { "node": ">=18.20.0" diff --git a/packages/trigger-sdk/src/v3/shared.ts b/packages/trigger-sdk/src/v3/shared.ts index a1f1139497..28afc7e38b 100644 --- a/packages/trigger-sdk/src/v3/shared.ts +++ b/packages/trigger-sdk/src/v3/shared.ts @@ -11,6 +11,7 @@ import { ApiRequestOptions, BatchTaskRunExecutionResult, conditionallyImportPacket, + convertToolParametersToSchema, createErrorTaskError, defaultRetryOptions, getSchemaParseFn, @@ -32,6 +33,7 @@ import { import { IdempotencyKey, idempotencyKeys, isIdempotencyKey } from "./idempotencyKeys.js"; import { PollOptions, runs } from "./runs.js"; import { tracer } from "./tracer.js"; +import type { Schema as AISchema, CoreTool } from "ai"; import type { AnyRunHandle, @@ -60,9 +62,14 @@ import type { TaskRunOptions, TaskRunResult, TaskSchema, + TaskWithSchema, TaskWithSchemaOptions, + TaskWithToolOptions, + ToolTask, + ToolTaskParameters, TriggerApiRequestOptions, } from "@trigger.dev/core/v3"; +import { z } from "zod"; export type { AnyRunHandle, @@ -111,6 +118,7 @@ export function createTask< const task: Task = { id: params.id, + description: params.description, trigger: async (payload, options) => { const taskMetadata = taskCatalog.getTaskManifest(params.id); @@ -183,6 +191,7 @@ export function createTask< taskCatalog.registerTaskMetadata({ id: params.id, + description: params.description, queue: params.queue, retry: params.retry ? { ...defaultRetryOptions, ...params.retry } : undefined, machine: params.machine, @@ -205,6 +214,31 @@ export function createTask< return task; } +export function createToolTask< + TIdentifier extends string, + TParameters extends ToolTaskParameters, + TOutput = unknown, + TInitOutput extends InitOutput = any, +>( + params: TaskWithToolOptions +): ToolTask { + const task = createSchemaTask({ + ...params, + schema: convertToolParametersToSchema(params.parameters), + }); + + return { + ...task, + tool: { + parameters: params.parameters, + description: params.description, + execute: async (args: any) => { + return task.triggerAndWait(args).unwrap(); + }, + }, + }; +} + export function createSchemaTask< TIdentifier extends string, TSchema extends TaskSchema | undefined = undefined, @@ -212,7 +246,7 @@ export function createSchemaTask< TInitOutput extends InitOutput = any, >( params: TaskWithSchemaOptions -): Task, TOutput> { +): TaskWithSchema { const customQueue = params.queue ? queue({ name: params.queue?.name ?? 
`task/${params.id}`, @@ -224,8 +258,10 @@ export function createSchemaTask< ? getSchemaParseFn>(params.schema) : undefined; - const task: Task, TOutput> = { + const task: TaskWithSchema = { id: params.id, + description: params.description, + schema: params.schema, trigger: async (payload, options, requestOptions) => { const taskMetadata = taskCatalog.getTaskManifest(params.id); @@ -299,6 +335,7 @@ export function createSchemaTask< taskCatalog.registerTaskMetadata({ id: params.id, + description: params.description, queue: params.queue, retry: params.retry ? { ...defaultRetryOptions, ...params.retry } : undefined, machine: params.machine, diff --git a/packages/trigger-sdk/src/v3/tasks.ts b/packages/trigger-sdk/src/v3/tasks.ts index 391bfc5790..470a73895f 100644 --- a/packages/trigger-sdk/src/v3/tasks.ts +++ b/packages/trigger-sdk/src/v3/tasks.ts @@ -3,6 +3,7 @@ import { batchTriggerAndWait, createTask, createSchemaTask, + createToolTask, SubtaskUnwrapError, trigger, triggerAndPoll, @@ -65,6 +66,8 @@ export const task = createTask; export const schemaTask = createSchemaTask; +export const toolTask = createToolTask; + export const tasks = { trigger, triggerAndPoll, diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index d508b9866e..df1782fc08 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1338,6 +1338,9 @@ importers: specifier: ^1.5.0 version: 1.5.0(zod@3.22.3) devDependencies: + '@ai-sdk/provider-utils': + specifier: ^1.0.22 + version: 1.0.22(zod@3.22.3) '@arethetypeswrong/cli': specifier: ^0.15.4 version: 0.15.4 @@ -1353,6 +1356,9 @@ importers: '@types/readable-stream': specifier: ^4.0.14 version: 4.0.14 + ai: + specifier: ^3.4.33 + version: 3.4.33(openai@4.68.4)(react@18.3.1)(svelte@4.2.19)(vue@3.4.38)(zod@3.22.3) defu: specifier: ^6.1.4 version: 6.1.4 @@ -1462,9 +1468,6 @@ importers: ws: specifier: ^8.11.0 version: 8.12.0 - zod: - specifier: 3.22.3 - version: 3.22.3 devDependencies: '@arethetypeswrong/cli': specifier: ^0.15.4 @@ -1484,6 +1487,9 @@ importers: '@types/ws': specifier: ^8.5.3 version: 8.5.4 + ai: + specifier: ^3.4.33 + version: 3.4.33(openai@4.68.4)(react@18.3.1)(svelte@4.2.19)(vue@3.4.38)(zod@3.22.3) encoding: specifier: ^0.1.13 version: 0.1.13 @@ -1502,6 +1508,9 @@ importers: typescript: specifier: ^5.5.4 version: 5.5.4 + zod: + specifier: 3.22.3 + version: 3.22.3 references/bun-catalog: dependencies: @@ -1549,6 +1558,9 @@ importers: references/nextjs-realtime: dependencies: + '@ai-sdk/openai': + specifier: ^0.0.72 + version: 0.0.72(zod@3.22.3) '@fal-ai/serverless-client': specifier: ^0.15.0 version: 0.15.0 @@ -1570,6 +1582,9 @@ importers: '@uploadthing/react': specifier: ^7.0.3 version: 7.0.3(next@14.2.15)(react@18.3.1)(uploadthing@7.1.0) + ai: + specifier: ^3.4.33 + version: 3.4.33(openai@4.68.4)(react@18.3.1)(svelte@4.2.19)(vue@3.4.38)(zod@3.22.3) class-variance-authority: specifier: ^0.7.0 version: 0.7.0 @@ -1838,6 +1853,17 @@ packages: resolution: {integrity: sha512-Ff9+ksdQQB3rMncgqDK78uLznstjyfIf2Arnh22pW8kBpLs6rpKDwgnZT46hin5Hl1WzazzK64DOrhSwYpS7bQ==} dev: false + /@ai-sdk/openai@0.0.72(zod@3.22.3): + resolution: {integrity: sha512-IKsgxIt6KJGkEHyMp975xW5VPmetwhI8g9H6dDmwvemBB41IRQa78YMNttiJqPcgmrZX2QfErOICv1gQvZ1gZg==} + engines: {node: '>=18'} + peerDependencies: + zod: ^3.0.0 + dependencies: + '@ai-sdk/provider': 0.0.26 + '@ai-sdk/provider-utils': 1.0.22(zod@3.22.3) + zod: 3.22.3 + dev: false + /@ai-sdk/provider-utils@1.0.17(zod@3.22.3): resolution: {integrity: sha512-2VyeTH5DQ6AxqvwdyytKIeiZyYTyJffpufWjE67zM2sXMIHgYl7fivo8m5wVl6Cbf1dFPSGKq//C9s+lz+NHrQ==} 
engines: {node: '>=18'} @@ -1854,6 +1880,21 @@ packages: zod: 3.22.3 dev: false + /@ai-sdk/provider-utils@1.0.22(zod@3.22.3): + resolution: {integrity: sha512-YHK2rpj++wnLVc9vPGzGFP3Pjeld2MwhKinetA0zKXOoHAT/Jit5O8kZsxcSlJPu9wvcGT1UGZEjZrtO7PfFOQ==} + engines: {node: '>=18'} + peerDependencies: + zod: ^3.0.0 + peerDependenciesMeta: + zod: + optional: true + dependencies: + '@ai-sdk/provider': 0.0.26 + eventsource-parser: 1.1.2 + nanoid: 3.3.7 + secure-json-parse: 2.7.0 + zod: 3.22.3 + /@ai-sdk/provider@0.0.22: resolution: {integrity: sha512-smZ1/2jL/JSKnbhC6ama/PxI2D/psj+YAe0c0qpd5ComQCNFltg72VFf0rpUSFMmFuj1pCCNoBOCrvyl8HTZHQ==} engines: {node: '>=18'} @@ -1861,6 +1902,12 @@ packages: json-schema: 0.4.0 dev: false + /@ai-sdk/provider@0.0.26: + resolution: {integrity: sha512-dQkfBDs2lTYpKM8389oopPdQgIU007GQyCbuPPrV+K6MtSII3HBfE0stUIMXUb44L+LK1t6GXPP7wjSzjO6uKg==} + engines: {node: '>=18'} + dependencies: + json-schema: 0.4.0 + /@ai-sdk/react@0.0.53(react@19.0.0-rc.0)(zod@3.22.3): resolution: {integrity: sha512-sIsmTFoR/QHvUUkltmHwP4bPjwy2vko6j/Nj8ayxLhEHs04Ug+dwXQyfA7MwgimEE3BcDQpWL8ikVj0m3ZILWQ==} engines: {node: '>=18'} @@ -1880,6 +1927,25 @@ packages: zod: 3.22.3 dev: false + /@ai-sdk/react@0.0.70(react@18.3.1)(zod@3.22.3): + resolution: {integrity: sha512-GnwbtjW4/4z7MleLiW+TOZC2M29eCg1tOUpuEiYFMmFNZK8mkrqM0PFZMo6UsYeUYMWqEOOcPOU9OQVJMJh7IQ==} + engines: {node: '>=18'} + peerDependencies: + react: ^18 || ^19 || ^19.0.0-rc + zod: ^3.0.0 + peerDependenciesMeta: + react: + optional: true + zod: + optional: true + dependencies: + '@ai-sdk/provider-utils': 1.0.22(zod@3.22.3) + '@ai-sdk/ui-utils': 0.0.50(zod@3.22.3) + react: 18.3.1 + swr: 2.2.5(react@18.3.1) + throttleit: 2.1.0 + zod: 3.22.3 + /@ai-sdk/solid@0.0.43(zod@3.22.3): resolution: {integrity: sha512-7PlPLaeMAu97oOY2gjywvKZMYHF+GDfUxYNcuJ4AZ3/MRBatzs/U2r4ClT1iH8uMOcMg02RX6UKzP5SgnUBjVw==} engines: {node: '>=18'} @@ -1895,6 +1961,20 @@ packages: - zod dev: false + /@ai-sdk/solid@0.0.54(zod@3.22.3): + resolution: {integrity: sha512-96KWTVK+opdFeRubqrgaJXoNiDP89gNxFRWUp0PJOotZW816AbhUf4EnDjBjXTLjXL1n0h8tGSE9sZsRkj9wQQ==} + engines: {node: '>=18'} + peerDependencies: + solid-js: ^1.7.7 + peerDependenciesMeta: + solid-js: + optional: true + dependencies: + '@ai-sdk/provider-utils': 1.0.22(zod@3.22.3) + '@ai-sdk/ui-utils': 0.0.50(zod@3.22.3) + transitivePeerDependencies: + - zod + /@ai-sdk/svelte@0.0.45(svelte@4.2.19)(zod@3.22.3): resolution: {integrity: sha512-w5Sdl0ArFIM3Fp8BbH4TUvlrS84WP/jN/wC1+fghMOXd7ceVO3Yhs9r71wTqndhgkLC7LAEX9Ll7ZEPfW9WBDA==} engines: {node: '>=18'} @@ -1912,6 +1992,22 @@ packages: - zod dev: false + /@ai-sdk/svelte@0.0.57(svelte@4.2.19)(zod@3.22.3): + resolution: {integrity: sha512-SyF9ItIR9ALP9yDNAD+2/5Vl1IT6kchgyDH8xkmhysfJI6WrvJbtO1wdQ0nylvPLcsPoYu+cAlz1krU4lFHcYw==} + engines: {node: '>=18'} + peerDependencies: + svelte: ^3.0.0 || ^4.0.0 || ^5.0.0 + peerDependenciesMeta: + svelte: + optional: true + dependencies: + '@ai-sdk/provider-utils': 1.0.22(zod@3.22.3) + '@ai-sdk/ui-utils': 0.0.50(zod@3.22.3) + sswr: 2.1.0(svelte@4.2.19) + svelte: 4.2.19 + transitivePeerDependencies: + - zod + /@ai-sdk/ui-utils@0.0.40(zod@3.22.3): resolution: {integrity: sha512-f0eonPUBO13pIO8jA9IGux7IKMeqpvWK22GBr3tOoSRnO5Wg5GEpXZU1V0Po+unpeZHyEPahrWbj5JfXcyWCqw==} engines: {node: '>=18'} @@ -1929,6 +2025,22 @@ packages: zod-to-json-schema: 3.23.2(zod@3.22.3) dev: false + /@ai-sdk/ui-utils@0.0.50(zod@3.22.3): + resolution: {integrity: sha512-Z5QYJVW+5XpSaJ4jYCCAVG7zIAuKOOdikhgpksneNmKvx61ACFaf98pmOd+xnjahl0pIlc/QIe6O4yVaJ1sEaw==} + 
engines: {node: '>=18'} + peerDependencies: + zod: ^3.0.0 + peerDependenciesMeta: + zod: + optional: true + dependencies: + '@ai-sdk/provider': 0.0.26 + '@ai-sdk/provider-utils': 1.0.22(zod@3.22.3) + json-schema: 0.4.0 + secure-json-parse: 2.7.0 + zod: 3.22.3 + zod-to-json-schema: 3.23.5(zod@3.22.3) + /@ai-sdk/vue@0.0.45(vue@3.4.38)(zod@3.22.3): resolution: {integrity: sha512-bqeoWZqk88TQmfoPgnFUKkrvhOIcOcSH5LMPgzZ8XwDqz5tHHrMHzpPfHCj7XyYn4ROTFK/2kKdC/ta6Ko0fMw==} engines: {node: '>=18'} @@ -1946,6 +2058,22 @@ packages: - zod dev: false + /@ai-sdk/vue@0.0.59(vue@3.4.38)(zod@3.22.3): + resolution: {integrity: sha512-+ofYlnqdc8c4F6tM0IKF0+7NagZRAiqBJpGDJ+6EYhDW8FHLUP/JFBgu32SjxSxC6IKFZxEnl68ZoP/Z38EMlw==} + engines: {node: '>=18'} + peerDependencies: + vue: ^3.3.4 + peerDependenciesMeta: + vue: + optional: true + dependencies: + '@ai-sdk/provider-utils': 1.0.22(zod@3.22.3) + '@ai-sdk/ui-utils': 0.0.50(zod@3.22.3) + swrv: 1.0.4(vue@3.4.38) + vue: 3.4.38(typescript@5.5.4) + transitivePeerDependencies: + - zod + /@alloc/quick-lru@5.2.0: resolution: {integrity: sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==} engines: {node: '>=10'} @@ -15020,7 +15148,6 @@ packages: /@types/diff-match-patch@1.0.36: resolution: {integrity: sha512-xFdR6tkm0MWvBfO8xXCSsinYxHcqkQUlcHeSpMC2ukzOb6lwQAfDmW+Qt0AvlGd8HpsS28qKsB+oPeJn9I39jg==} - dev: false /@types/docker-modem@3.0.6: resolution: {integrity: sha512-yKpAGEuKRSS8wwx0joknWxsmLha78wNMe9R2S3UNsVOkZded8UqOrV8KoeDXoXsjndxwyF3eIhyClGbO1SEhEg==} @@ -15220,7 +15347,6 @@ packages: dependencies: '@types/node': 18.19.20 form-data: 3.0.1 - dev: false /@types/node-forge@1.3.10: resolution: {integrity: sha512-y6PJDYN4xYBxwd22l+OVH35N+1fCYWiuC3aiP2SlXVE6Lo7SS+rSx9r89hLxrP4pn6n1lBGhHJ12pj3F3Mpttw==} @@ -15999,14 +16125,12 @@ packages: entities: 4.5.0 estree-walker: 2.0.2 source-map-js: 1.2.0 - dev: false /@vue/compiler-dom@3.4.38: resolution: {integrity: sha512-Osc/c7ABsHXTsETLgykcOwIxFktHfGSUDkb05V61rocEfsFDcjDLH/IHJSNJP+/Sv9KeN2Lx1V6McZzlSb9EhQ==} dependencies: '@vue/compiler-core': 3.4.38 '@vue/shared': 3.4.38 - dev: false /@vue/compiler-sfc@3.4.38: resolution: {integrity: sha512-s5QfZ+9PzPh3T5H4hsQDJtI8x7zdJaew/dCGgqZ2630XdzaZ3AD8xGZfBqpT8oaD/p2eedd+pL8tD5vvt5ZYJQ==} @@ -16020,27 +16144,23 @@ packages: magic-string: 0.30.11 postcss: 8.4.44 source-map-js: 1.2.0 - dev: false /@vue/compiler-ssr@3.4.38: resolution: {integrity: sha512-YXznKFQ8dxYpAz9zLuVvfcXhc31FSPFDcqr0kyujbOwNhlmaNvL2QfIy+RZeJgSn5Fk54CWoEUeW+NVBAogGaw==} dependencies: '@vue/compiler-dom': 3.4.38 '@vue/shared': 3.4.38 - dev: false /@vue/reactivity@3.4.38: resolution: {integrity: sha512-4vl4wMMVniLsSYYeldAKzbk72+D3hUnkw9z8lDeJacTxAkXeDAP1uE9xr2+aKIN0ipOL8EG2GPouVTH6yF7Gnw==} dependencies: '@vue/shared': 3.4.38 - dev: false /@vue/runtime-core@3.4.38: resolution: {integrity: sha512-21z3wA99EABtuf+O3IhdxP0iHgkBs1vuoCAsCKLVJPEjpVqvblwBnTj42vzHRlWDCyxu9ptDm7sI2ZMcWrQqlA==} dependencies: '@vue/reactivity': 3.4.38 '@vue/shared': 3.4.38 - dev: false /@vue/runtime-dom@3.4.38: resolution: {integrity: sha512-afZzmUreU7vKwKsV17H1NDThEEmdYI+GCAK/KY1U957Ig2NATPVjCROv61R19fjZNzMmiU03n79OMnXyJVN0UA==} @@ -16049,7 +16169,6 @@ packages: '@vue/runtime-core': 3.4.38 '@vue/shared': 3.4.38 csstype: 3.1.3 - dev: false /@vue/server-renderer@3.4.38(vue@3.4.38): resolution: {integrity: sha512-NggOTr82FbPEkkUvBm4fTGcwUY8UuTsnWC/L2YZBmvaQ4C4Jl/Ao4HHTB+l7WnFCt5M/dN3l0XLuyjzswGYVCA==} @@ -16059,11 +16178,9 @@ packages: '@vue/compiler-ssr': 3.4.38 '@vue/shared': 3.4.38 vue: 
3.4.38(typescript@5.5.4) - dev: false /@vue/shared@3.4.38: resolution: {integrity: sha512-q0xCiLkuWWQLzVrecPb0RMsNWyxICOjPrcrwxTUEHb1fsnvni4dcuyG7RT/Ie7VPTvnjzIaWzRMUBsrqNj/hhw==} - dev: false /@web3-storage/multipart-parser@1.0.0: resolution: {integrity: sha512-BEO6al7BYqcnfX15W2cnGR+Q566ACXAT9UQykORCWW80lmkpWsnEob6zJS1ZVBKsSJC8+7vJkHwlp+lXG1UCdw==} @@ -16352,7 +16469,6 @@ packages: engines: {node: '>= 8.0.0'} dependencies: humanize-ms: 1.2.1 - dev: false /aggregate-error@3.1.0: resolution: {integrity: sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==} @@ -16417,6 +16533,48 @@ packages: - vue dev: false + /ai@3.4.33(openai@4.68.4)(react@18.3.1)(svelte@4.2.19)(vue@3.4.38)(zod@3.22.3): + resolution: {integrity: sha512-plBlrVZKwPoRTmM8+D1sJac9Bq8eaa2jiZlHLZIWekKWI1yMWYZvCCEezY9ASPwRhULYDJB2VhKOBUUeg3S5JQ==} + engines: {node: '>=18'} + peerDependencies: + openai: ^4.42.0 + react: ^18 || ^19 || ^19.0.0-rc + sswr: ^2.1.0 + svelte: ^3.0.0 || ^4.0.0 || ^5.0.0 + zod: ^3.0.0 + peerDependenciesMeta: + openai: + optional: true + react: + optional: true + sswr: + optional: true + svelte: + optional: true + zod: + optional: true + dependencies: + '@ai-sdk/provider': 0.0.26 + '@ai-sdk/provider-utils': 1.0.22(zod@3.22.3) + '@ai-sdk/react': 0.0.70(react@18.3.1)(zod@3.22.3) + '@ai-sdk/solid': 0.0.54(zod@3.22.3) + '@ai-sdk/svelte': 0.0.57(svelte@4.2.19)(zod@3.22.3) + '@ai-sdk/ui-utils': 0.0.50(zod@3.22.3) + '@ai-sdk/vue': 0.0.59(vue@3.4.38)(zod@3.22.3) + '@opentelemetry/api': 1.9.0 + eventsource-parser: 1.1.2 + json-schema: 0.4.0 + jsondiffpatch: 0.6.0 + openai: 4.68.4(zod@3.22.3) + react: 18.3.1 + secure-json-parse: 2.7.0 + svelte: 4.2.19 + zod: 3.22.3 + zod-to-json-schema: 3.23.5(zod@3.22.3) + transitivePeerDependencies: + - solid-js + - vue + /ajv-formats@2.1.1(ajv@8.12.0): resolution: {integrity: sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==} peerDependencies: @@ -16927,7 +17085,6 @@ packages: /axobject-query@4.1.0: resolution: {integrity: sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ==} engines: {node: '>= 0.4'} - dev: false /b4a@1.6.6: resolution: {integrity: sha512-5Tk1HLk6b6ctmjIkAcU/Ujv/1WqiDl0F0JdRCR80VsOcUlHcu7pWeWRlOqQLHfDEsVx9YH/aif5AG4ehoCtTmg==} @@ -17728,7 +17885,6 @@ packages: /client-only@0.0.1: resolution: {integrity: sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==} - dev: false /cliui@6.0.0: resolution: {integrity: sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==} @@ -17796,7 +17952,6 @@ packages: acorn: 8.12.1 estree-walker: 3.0.3 periscopic: 3.1.0 - dev: false /codemirror@6.0.1(@lezer/common@1.0.2): resolution: {integrity: sha512-J8j+nZ+CdWmIeFIGXEFbFPtpiYacFMDR8GlHK3IyHQJMCaVRfGx9NT+Hxivv1ckLWPvNdZqndbr/7lVhrf/Svg==} @@ -18287,7 +18442,6 @@ packages: dependencies: mdn-data: 2.0.30 source-map-js: 1.2.0 - dev: false /css-unit-converter@1.1.2: resolution: {integrity: sha512-IiJwMC8rdZE0+xiEZHeru6YoONC4rfPMqGm2W85jMIbkFvv5nFTwJVFHam2eFrN6txmoUYFAFXiv8ICVeTO0MA==} @@ -18751,7 +18905,6 @@ packages: /diff-match-patch@1.0.5: resolution: {integrity: sha512-IayShXAgj/QMXgB0IWmKx+rOPuGMhqm5w6jvFxmVenXKIzRqTAAsbBPT3kWQeGANj3jGgvcvv4yK6SxqYmikgw==} - dev: false /diff-sequences@29.6.3: resolution: {integrity: sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==} @@ -19037,7 +19190,6 @@ packages: /entities@4.5.0: 
resolution: {integrity: sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==} engines: {node: '>=0.12'} - dev: false /env-paths@2.2.1: resolution: {integrity: sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==} @@ -20245,7 +20397,6 @@ packages: /estree-walker@2.0.2: resolution: {integrity: sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==} - dev: false /estree-walker@3.0.3: resolution: {integrity: sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==} @@ -20295,7 +20446,6 @@ packages: /eventsource-parser@1.1.2: resolution: {integrity: sha512-v0eOBUbiaFojBu2s2NPBfYUoRR9GjcDNvCXVaqEf5vVfpIAh9f8RCo4vXTP8c63QRKCFwoLpMpTdPwwhEKVgzA==} engines: {node: '>=14.18'} - dev: false /eventsource-parser@3.0.0: resolution: {integrity: sha512-T1C0XCUimhxVQzW4zFipdx0SficT651NnkR0ZSH3yQwh+mFMdLfgjABVi4YtMTtaL4s168593DaoaRLMqryavA==} @@ -20690,7 +20840,6 @@ packages: /form-data-encoder@1.7.2: resolution: {integrity: sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A==} - dev: false /form-data@2.3.3: resolution: {integrity: sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==} @@ -20737,7 +20886,6 @@ packages: dependencies: node-domexception: 1.0.0 web-streams-polyfill: 4.0.0-beta.3 - dev: false /formdata-polyfill@4.0.10: resolution: {integrity: sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==} @@ -21597,7 +21745,6 @@ packages: resolution: {integrity: sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==} dependencies: ms: 2.1.3 - dev: false /hyphenate-style-name@1.0.4: resolution: {integrity: sha512-ygGZLjmXfPHj+ZWh6LwbC37l43MhfztxetbFCoYTM2VjkIUpeHgSNn7QIyVFj7YQ1Wl9Cbw5sholVJPzWvC2MQ==} @@ -22357,7 +22504,6 @@ packages: /json-schema@0.4.0: resolution: {integrity: sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==} - dev: false /json-stable-stringify-without-jsonify@1.0.1: resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==} @@ -22390,7 +22536,6 @@ packages: '@types/diff-match-patch': 1.0.36 chalk: 5.3.0 diff-match-patch: 1.0.5 - dev: false /jsonfile@4.0.0: resolution: {integrity: sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg==} @@ -22556,7 +22701,6 @@ packages: /locate-character@3.0.0: resolution: {integrity: sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA==} - dev: false /locate-path@5.0.0: resolution: {integrity: sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==} @@ -22997,7 +23141,6 @@ packages: /mdn-data@2.0.30: resolution: {integrity: sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA==} - dev: false /media-query-parser@2.0.2: resolution: {integrity: sha512-1N4qp+jE0pL5Xv4uEcwVUhIkwdUO3S/9gML90nqKA7v7FcOS5vUtatfzok9S9U1EJU8dHWlcv95WLnKmmxZI9w==} @@ -23896,7 +24039,6 @@ packages: /node-domexception@1.0.0: resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==} engines: {node: '>=10.5.0'} - dev: false /node-emoji@1.11.0: resolution: {integrity: 
sha512-wo2DpQkQp7Sjm2A0cq+sN7EHKO6Sl0ctXeBdFZrL9T9+UywORbufTcTZxom8YqpLQt/FqNMUkOpkZrJVYSKD3A==} @@ -24390,7 +24532,6 @@ packages: zod: 3.22.3 transitivePeerDependencies: - encoding - dev: false /opener@1.5.2: resolution: {integrity: sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A==} @@ -26453,7 +26594,6 @@ packages: engines: {node: '>=0.10.0'} dependencies: loose-envify: 1.4.0 - dev: false /react@19.0.0-rc.0: resolution: {integrity: sha512-8nrDCl5uE54FHeKqKrEO0TS+10bT4cxutJGb2okiJc0FHMQ6I3FeItaqly/1nbijlhSO3HmAVyPIexIQQWYAtQ==} @@ -27232,7 +27372,6 @@ packages: /secure-json-parse@2.7.0: resolution: {integrity: sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==} - dev: false /seedrandom@3.0.5: resolution: {integrity: sha512-8OwmbklUNzwezjGInmZ+2clQmExQPvomqjL7LFqOYqtmuxRgQYqOD3mHaU+MvZn5FLUeVxVfQjwLZW/n/JFuqg==} @@ -27797,7 +27936,6 @@ packages: dependencies: svelte: 4.2.19 swrev: 4.0.0 - dev: false /stack-generator@2.0.10: resolution: {integrity: sha512-mwnua/hkqM6pF4k8SnmZ2zfETsRUpWXREfA/goT8SLCV4iOFa4bzOX2nDipWAZFPTjLvQB82f5yaodMVhK0yJQ==} @@ -28229,7 +28367,6 @@ packages: locate-character: 3.0.0 magic-string: 0.30.11 periscopic: 3.1.0 - dev: false /swr@2.2.5(react@18.3.1): resolution: {integrity: sha512-QtxqyclFeAsxEUeZIYmsaQ0UjimSq1RZ9Un7I68/0ClKK/U3LoyQunwkQfJZr2fc22DfIXLNDc2wFyTEikCUpg==} @@ -28239,7 +28376,6 @@ packages: client-only: 0.0.1 react: 18.3.1 use-sync-external-store: 1.2.2(react@18.3.1) - dev: false /swr@2.2.5(react@19.0.0-rc.0): resolution: {integrity: sha512-QtxqyclFeAsxEUeZIYmsaQ0UjimSq1RZ9Un7I68/0ClKK/U3LoyQunwkQfJZr2fc22DfIXLNDc2wFyTEikCUpg==} @@ -28253,7 +28389,6 @@ packages: /swrev@4.0.0: resolution: {integrity: sha512-LqVcOHSB4cPGgitD1riJ1Hh4vdmITOp+BkmfmXRh4hSF/t7EnS4iD+SOTmq7w5pPm/SiPeto4ADbKS6dHUDWFA==} - dev: false /swrv@1.0.4(vue@3.4.38): resolution: {integrity: sha512-zjEkcP8Ywmj+xOJW3lIT65ciY/4AL4e/Or7Gj0MzU3zBJNMdJiT8geVZhINavnlHRMMCcJLHhraLTAiDOTmQ9g==} @@ -28261,7 +28396,6 @@ packages: vue: '>=3.2.26 < 4' dependencies: vue: 3.4.38(typescript@5.5.4) - dev: false /sync-content@2.0.1: resolution: {integrity: sha512-NI1mo514yFhr8pV/5Etvgh+pSBUIpoAKoiBIUwALVlQQNAwb40bTw8hhPFaip/dvv0GhpHVOq0vq8iY02ppLTg==} @@ -28645,6 +28779,10 @@ packages: engines: {node: '>=10'} dev: false + /throttleit@2.1.0: + resolution: {integrity: sha512-nt6AMGKW1p/70DF/hGBdJB57B8Tspmbp5gfJ8ilhLnt7kkr2ye7hzD6NVG8GGErk2HWF34igrL2CXmNIkzKqKw==} + engines: {node: '>=18'} + /through2@2.0.5: resolution: {integrity: sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==} dependencies: @@ -29775,7 +29913,6 @@ packages: react: ^16.8.0 || ^17.0.0 || ^18.0.0 dependencies: react: 18.3.1 - dev: false /use-sync-external-store@1.2.2(react@19.0.0-rc.0): resolution: {integrity: sha512-PElTlVMwpblvbNqQ82d2n6RjStvdSoNe9FG28kNfz3WiXilJm4DdNkEzRhCZuIDwY8U08WVihhGR5iRqAwfDiw==} @@ -30502,7 +30639,6 @@ packages: '@vue/server-renderer': 3.4.38(vue@3.4.38) '@vue/shared': 3.4.38 typescript: 5.5.4 - dev: false /w3c-keyname@2.2.6: resolution: {integrity: sha512-f+fciywl1SJEniZHD6H+kUO8gOnwIr7f4ijKA6+ZvJFjeGi1r4PDLl53Ayud9O/rk64RqgoQine0feoeOU0kXg==} @@ -30545,7 +30681,6 @@ packages: /web-streams-polyfill@4.0.0-beta.3: resolution: {integrity: sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==} engines: {node: '>= 14'} - dev: false /webidl-conversions@3.0.1: resolution: {integrity: 
sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} @@ -31137,6 +31272,13 @@ packages: zod: 3.22.3 dev: false + /zod-to-json-schema@3.23.5(zod@3.22.3): + resolution: {integrity: sha512-5wlSS0bXfF/BrL4jPAbz9da5hDlDptdEppYfe+x4eIJ7jioqKG9uUxOwPzqof09u/XeVdrgFu29lZi+8XNDJtA==} + peerDependencies: + zod: ^3.23.3 + dependencies: + zod: 3.22.3 + /zod-validation-error@1.5.0(zod@3.22.3): resolution: {integrity: sha512-/7eFkAI4qV0tcxMBB/3+d2c1P6jzzZYdYSlBuAklzMuCrJu5bzJfHS0yVAS87dRHVlhftd6RFJDIvv03JgkSbw==} engines: {node: '>=16.0.0'} diff --git a/references/nextjs-realtime/package.json b/references/nextjs-realtime/package.json index 40b8e7da35..880ffeb2c5 100644 --- a/references/nextjs-realtime/package.json +++ b/references/nextjs-realtime/package.json @@ -10,6 +10,7 @@ "dev:trigger": "trigger dev" }, "dependencies": { + "@ai-sdk/openai": "^0.0.72", "@fal-ai/serverless-client": "^0.15.0", "@radix-ui/react-icons": "^1.3.0", "@radix-ui/react-scroll-area": "^1.2.0", @@ -17,6 +18,7 @@ "@trigger.dev/react-hooks": "workspace:^3", "@trigger.dev/sdk": "workspace:^3", "@uploadthing/react": "^7.0.3", + "ai": "^3.4.33", "class-variance-authority": "^0.7.0", "clsx": "^2.1.1", "lucide-react": "^0.451.0", diff --git a/references/nextjs-realtime/src/trigger/ai.ts b/references/nextjs-realtime/src/trigger/ai.ts index ef69ebbef2..73a8d786e5 100644 --- a/references/nextjs-realtime/src/trigger/ai.ts +++ b/references/nextjs-realtime/src/trigger/ai.ts @@ -1,40 +1,10 @@ -import { OpenAI } from "openai"; -import { runs, logger, metadata, schemaTask, task, waitUntil } from "@trigger.dev/sdk/v3"; -import { z } from "zod"; +import { openai } from "@ai-sdk/openai"; +import { logger, metadata, runs, schemaTask, task, toolTask, wait } from "@trigger.dev/sdk/v3"; +import { streamText, type TextStreamPart } from "ai"; import { setTimeout } from "node:timers/promises"; +import { z } from "zod"; -const openai = new OpenAI(); - -export const openaiStreaming = schemaTask({ - id: "openai-streaming", - schema: z.object({ - model: z.string().default("chatgpt-4o-latest"), - prompt: z.string().default("Hello, how are you?"), - }), - run: async ({ model, prompt }) => { - logger.info("Running OpenAI model", { model, prompt }); - - const result = await openai.chat.completions.create({ - model: model, - messages: [ - { - role: "system", - content: prompt, - }, - ], - stream: true, - }); - - const stream = await metadata.stream("openai", result); - - for await (const chunk of stream) { - } - - await setTimeout(1000); - }, -}); - -type STREAMS = { openai: OpenAI.Chat.Completions.ChatCompletionChunk }; +type STREAMS = { openai: TextStreamPart<{ getWeather: typeof weatherTask.tool }> }; export const openaiConsumer = schemaTask({ id: "openai-consumer", @@ -56,8 +26,27 @@ export const openaiConsumer = schemaTask({ case "openai": { logger.info("Received OpenAI chunk", { chunk: part.chunk, run: part.run }); - if (part.chunk.choices[0].delta?.content) { - openaiCompletion += part.chunk.choices[0].delta.content; + switch (part.chunk.type) { + case "text-delta": { + openaiCompletion += part.chunk.textDelta; + break; + } + case "tool-call": { + switch (part.chunk.toolName) { + case "getWeather": { + console.log("Calling getWeather tool with args", { args: part.chunk.args }); + } + } + break; + } + case "tool-result": { + switch (part.chunk.toolName) { + case "getWeather": { + console.log("Received getWeather tool result", { result: part.chunk.result }); + } + } + break; + } } } } @@ -73,3 +62,47 @@ export 
const waitUntilExamples = task({ await setTimeout(30_000); }, }); + +export const weatherTask = toolTask({ + id: "weather", + description: "Get the weather for a location", + parameters: z.object({ + location: z.string(), + }), + run: async ({ location }) => { + // Simulate a long-running task + await wait.for({ seconds: 5 }); + // return mock data + return { + location, + temperature: 72 + Math.floor(Math.random() * 21) - 10, + }; + }, +}); + +export const openaiStreaming = schemaTask({ + id: "openai-streaming", + description: "Stream data from OpenAI", + schema: z.object({ + model: z.string().default("chatgpt-4o-latest"), + prompt: z.string().default("Hello, how are you?"), + }), + run: async ({ model, prompt }) => { + logger.info("Running OpenAI model", { model, prompt }); + + const result = await streamText({ + model: openai(model), + prompt, + tools: { + getWeather: weatherTask.tool, + }, + }); + + const stream = await metadata.stream("openai", result.fullStream); + + for await (const chunk of stream) { + } + + await setTimeout(1000); + }, +}); From 8e214d5bedd4a4d0a27f1187c9db082049bde23c Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Tue, 12 Nov 2024 13:14:47 +0000 Subject: [PATCH 08/31] Use the config file path to determine the workingDir, then the package.json path --- packages/cli-v3/src/config.ts | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/cli-v3/src/config.ts b/packages/cli-v3/src/config.ts index fc52cc3d9a..3283c15eb6 100644 --- a/packages/cli-v3/src/config.ts +++ b/packages/cli-v3/src/config.ts @@ -142,7 +142,11 @@ async function resolveConfig( const lockfilePath = await resolveLockfile(cwd); const workspaceDir = await findWorkspaceDir(cwd); - const workingDir = packageJsonPath ? dirname(packageJsonPath) : cwd; + const workingDir = result.configFile + ? dirname(result.configFile) + : packageJsonPath + ? dirname(packageJsonPath) + : cwd; const config = "config" in result.config ? 
(result.config.config as TriggerConfig) : result.config; From 8c7ee15ce04a6603175fea378c593dcfd577db46 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Tue, 12 Nov 2024 13:20:08 +0000 Subject: [PATCH 09/31] Remove stream test files --- stream_fetch_test.js | 125 ------------------------------------------- stream_test.js | 41 -------------- stream_test.sh | 13 ----- 3 files changed, 179 deletions(-) delete mode 100644 stream_fetch_test.js delete mode 100644 stream_test.js delete mode 100755 stream_test.sh diff --git a/stream_fetch_test.js b/stream_fetch_test.js deleted file mode 100644 index 86ad7cba62..0000000000 --- a/stream_fetch_test.js +++ /dev/null @@ -1,125 +0,0 @@ -class MetadataStream { - constructor(options) { - this.options = options; - this.controller = new AbortController(); - this.serverQueue = []; - this.consumerQueue = []; - - const { serverIterator, consumerIterator } = this.createTeeIterators(); - this.serverIterator = serverIterator; - this.consumerIterator = consumerIterator; - - this.streamPromise = this.initializeServerStream(); - } - - createTeeIterators() { - const teeIterator = (queue) => ({ - next: () => { - if (queue.length === 0) { - const result = this.options.iterator.next(); - this.serverQueue.push(result); - this.consumerQueue.push(result); - } - return queue.shift(); - }, - }); - - return { - serverIterator: teeIterator(this.serverQueue), - consumerIterator: teeIterator(this.consumerQueue), - }; - } - - initializeServerStream() { - const serverIterator = this.serverIterator; - - const serverStream = new ReadableStream({ - async pull(controller) { - try { - const { value, done } = await serverIterator.next(); - if (done) { - controller.close(); - return; - } - - console.log("Server sent:", value, new Date().toISOString()); - - controller.enqueue(JSON.stringify(value) + "\n"); - } catch (err) { - controller.error(err); - } - }, - cancel: () => this.controller.abort(), - }); - - return fetch( - `${this.options.baseUrl}/realtime/v1/streams/${this.options.runId}/${this.options.key}`, - { - method: "POST", - headers: {}, - body: serverStream, - duplex: "half", - signal: this.controller.signal, - } - ).catch((error) => { - console.error("Error in stream:", error); - }); - } - - async wait() { - return this.streamPromise.then(() => void 0); - } - - [Symbol.asyncIterator]() { - return this.consumerIterator; - } -} - -// Example usage: -async function* generateSampleData() { - const items = [ - { type: "start", timestamp: Date.now() }, - { type: "progress", value: 25 }, - { type: "progress", value: 50 }, - { type: "progress", value: 75 }, - { type: "complete", timestamp: Date.now() }, - ]; - - for (const item of items) { - await new Promise((resolve) => setTimeout(resolve, 1000)); - yield item; - } -} - -async function runTest() { - const { OpenAI } = require("./references/v3-catalog/node_modules/openai"); - const openai = new OpenAI(); - - const result = await openai.chat.completions.create({ - model: "chatgpt-4o-latest", - messages: [ - { - role: "system", - content: "Can you summarize the plot of The Matrix?", - }, - ], - stream: true, - }); - - const stream = new MetadataStream({ - baseUrl: "http://localhost:3030", - runId: "test_run_1234", - key: "openai", - iterator: result[Symbol.asyncIterator](), - }); - - // Consume the stream - // for await (const value of stream) { - // console.log("Consumer received:", value, new Date().toISOString()); - // } - - await stream.wait(); - console.log("Stream completed", new Date().toISOString()); -} - 
-runTest().catch(console.error); diff --git a/stream_test.js b/stream_test.js deleted file mode 100644 index 32ed362963..0000000000 --- a/stream_test.js +++ /dev/null @@ -1,41 +0,0 @@ -const http = require("http"); - -const options = { - hostname: "localhost", - port: 3030, - path: "/realtime/v1/streams/test", - method: "POST", - headers: { - "Content-Type": "application/x-ndjson", - "Transfer-Encoding": "chunked", // Enable chunked transfer encoding - }, -}; - -const req = http.request(options, (res) => { - console.log(`STATUS: ${res.statusCode}`); - res.on("data", () => {}); - res.on("end", () => { - console.log("No more data in response."); - }); -}); - -req.on("error", (e) => { - console.error(`Problem with request: ${e.message}`); -}); - -// Function to send data with a delay -const sendData = (message, delay) => { - setTimeout(() => { - console.log(`Sending: ${message}`); - req.write(message + "\n"); - }, delay); -}; - -sendData('{"message": "chunk 1"}', 0); -sendData('{"message": "chunk 2"}', 1000); -sendData('{"message": "chunk 3"}', 2000); -sendData('{"message": "chunk 4"}', 3000); - -setTimeout(() => { - req.end(); -}, 4000); diff --git a/stream_test.sh b/stream_test.sh deleted file mode 100755 index 914f728c3d..0000000000 --- a/stream_test.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/zsh - -( - echo -n '{"message": "chunk 1"}\n' - sleep 1 - echo -n '{"message": "chunk 2"}\n' - sleep 1 - echo -n '{"message": "chunk 3"}\n' - sleep 1 - echo -n '{"message": "chunk 4"}\n' -) | curl -v -X POST "http://localhost:3030/realtime/v1/streams/express/test" \ - -H "Content-Type: application/x-ndjson" \ - --data-binary @- \ No newline at end of file From f1eaf5b94b241407189fb6d2d9fa0b0f8899b1f2 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Wed, 13 Nov 2024 11:49:32 +0000 Subject: [PATCH 10/31] useTaskTrigger react hook that allows triggering a task from the client --- .../routes/api.v1.tasks.$taskId.trigger.ts | 216 +++++++------- .../app/services/authorization.server.ts | 2 +- .../routeBuiilders/apiBuilder.server.ts | 276 +++++++++++++++++- packages/core/src/v3/apiClient/index.ts | 13 + packages/core/src/v3/idempotencyKeys.ts | 100 +++++++ packages/core/src/v3/index.ts | 1 + packages/core/src/v3/types/tasks.ts | 4 +- packages/core/src/v3/utils/ioSerialization.ts | 2 +- .../react-hooks/src/hooks/useTaskTrigger.ts | 64 ++++ packages/react-hooks/src/index.ts | 1 + packages/trigger-sdk/src/v3/auth.ts | 2 +- .../trigger-sdk/src/v3/idempotencyKeys.ts | 86 +----- packages/trigger-sdk/src/v3/shared.ts | 25 +- references/nextjs-realtime/src/app/page.tsx | 13 +- .../src/components/TriggerButton.tsx | 41 +++ 15 files changed, 617 insertions(+), 229 deletions(-) create mode 100644 packages/core/src/v3/idempotencyKeys.ts create mode 100644 packages/react-hooks/src/hooks/useTaskTrigger.ts create mode 100644 references/nextjs-realtime/src/components/TriggerButton.tsx diff --git a/apps/webapp/app/routes/api.v1.tasks.$taskId.trigger.ts b/apps/webapp/app/routes/api.v1.tasks.$taskId.trigger.ts index aae68c91d6..a32078b2a6 100644 --- a/apps/webapp/app/routes/api.v1.tasks.$taskId.trigger.ts +++ b/apps/webapp/app/routes/api.v1.tasks.$taskId.trigger.ts @@ -1,15 +1,13 @@ -import { fromZodError } from "zod-validation-error"; -import type { ActionFunctionArgs } from "@remix-run/server-runtime"; import { json } from "@remix-run/server-runtime"; -import { TriggerTaskRequestBody } from "@trigger.dev/core/v3"; +import { generateJWT as internal_generateJWT, TriggerTaskRequestBody } from "@trigger.dev/core/v3"; +import { 
TaskRun } from "@trigger.dev/database"; import { z } from "zod"; import { env } from "~/env.server"; -import { authenticateApiRequest } from "~/services/apiAuth.server"; +import { AuthenticatedEnvironment } from "~/services/apiAuth.server"; import { logger } from "~/services/logger.server"; -import { parseRequestJsonAsync } from "~/utils/parseRequestJson.server"; +import { createActionApiRoute } from "~/services/routeBuiilders/apiBuilder.server"; import { ServiceValidationError } from "~/v3/services/baseService.server"; import { OutOfEntitlementError, TriggerTaskService } from "~/v3/services/triggerTask.server"; -import { startActiveSpan } from "~/v3/tracer.server"; const ParamsSchema = z.object({ taskId: z.string(), @@ -20,115 +18,125 @@ export const HeadersSchema = z.object({ "trigger-version": z.string().nullish(), "x-trigger-span-parent-as-link": z.coerce.number().nullish(), "x-trigger-worker": z.string().nullish(), + "x-trigger-client": z.string().nullish(), traceparent: z.string().optional(), tracestate: z.string().optional(), }); -export async function action({ request, params }: ActionFunctionArgs) { - // Ensure this is a POST request - if (request.method.toUpperCase() !== "POST") { - return { status: 405, body: "Method Not Allowed" }; - } - - logger.debug("TriggerTask action", { headers: Object.fromEntries(request.headers) }); - - // Next authenticate the request - const authenticationResult = await authenticateApiRequest(request); - - if (!authenticationResult) { - return json({ error: "Invalid or Missing API key" }, { status: 401 }); - } - - const contentLength = request.headers.get("content-length"); - - if (!contentLength || parseInt(contentLength) > env.TASK_PAYLOAD_MAXIMUM_SIZE) { - return json({ error: "Request body too large" }, { status: 413 }); - } +const { action, loader } = createActionApiRoute( + { + headers: HeadersSchema, + params: ParamsSchema, + body: TriggerTaskRequestBody, + allowJWT: true, + maxContentLength: env.TASK_PAYLOAD_MAXIMUM_SIZE, + authorization: { + action: "write", + resource: (params) => ({ tasks: params.taskId }), + superScopes: ["write:tasks", "admin"], + }, + corsStrategy: "all", + }, + async ({ body, headers, params, authentication }) => { + const { + "idempotency-key": idempotencyKey, + "trigger-version": triggerVersion, + "x-trigger-span-parent-as-link": spanParentAsLink, + traceparent, + tracestate, + "x-trigger-worker": isFromWorker, + "x-trigger-client": triggerClient, + } = headers; + + const service = new TriggerTaskService(); + + try { + const traceContext = + traceparent && isFromWorker /// If the request is from a worker, we should pass the trace context + ? { traceparent, tracestate } + : undefined; + + logger.debug("Triggering task", { + taskId: params.taskId, + idempotencyKey, + triggerVersion, + headers, + options: body.options, + isFromWorker, + traceContext, + }); + + const run = await service.call(params.taskId, authentication.environment, body, { + idempotencyKey: idempotencyKey ?? undefined, + triggerVersion: triggerVersion ?? 
undefined, + traceContext, + spanParentAsLink: spanParentAsLink === 1, + }); + + if (!run) { + return json({ error: "Task not found" }, { status: 404 }); + } - const rawHeaders = Object.fromEntries(request.headers); + const $responseHeaders = await responseHeaders( + run, + authentication.environment, + triggerClient + ); - const headers = HeadersSchema.safeParse(rawHeaders); + return json( + { + id: run.friendlyId, + }, + { + headers: $responseHeaders, + } + ); + } catch (error) { + if (error instanceof ServiceValidationError) { + return json({ error: error.message }, { status: 422 }); + } else if (error instanceof OutOfEntitlementError) { + return json({ error: error.message }, { status: 422 }); + } else if (error instanceof Error) { + return json({ error: error.message }, { status: 400 }); + } - if (!headers.success) { - return json({ error: "Invalid headers" }, { status: 400 }); + return json({ error: "Something went wrong" }, { status: 500 }); + } } - - const { - "idempotency-key": idempotencyKey, - "trigger-version": triggerVersion, - "x-trigger-span-parent-as-link": spanParentAsLink, - traceparent, - tracestate, - "x-trigger-worker": isFromWorker, - } = headers.data; - - const { taskId } = ParamsSchema.parse(params); - - // Now parse the request body - const anyBody = await parseRequestJsonAsync(request, { taskId }); - - const body = await startActiveSpan("TriggerTaskRequestBody.safeParse()", async (span) => { - return TriggerTaskRequestBody.safeParse(anyBody); +); + +async function responseHeaders( + run: TaskRun, + environment: AuthenticatedEnvironment, + triggerClient?: string | null +): Promise> { + const claimsHeader = JSON.stringify({ + sub: run.runtimeEnvironmentId, + pub: true, }); - if (!body.success) { - return json( - { error: fromZodError(body.error, { prefix: "Invalid trigger call" }).toString() }, - { status: 400 } - ); - } - - const service = new TriggerTaskService(); - - try { - const traceContext = - traceparent && isFromWorker /// If the request is from a worker, we should pass the trace context - ? { traceparent, tracestate } - : undefined; - - logger.debug("Triggering task", { - taskId, - idempotencyKey, - triggerVersion, - headers: Object.fromEntries(request.headers), - options: body.data.options, - isFromWorker, - traceContext, + if (triggerClient === "browser") { + const claims = { + sub: run.runtimeEnvironmentId, + pub: true, + scopes: [`read:runs:${run.friendlyId}`], + }; + + const jwt = await internal_generateJWT({ + secretKey: environment.apiKey, + payload: claims, + expirationTime: "1h", }); - const run = await service.call(taskId, authenticationResult.environment, body.data, { - idempotencyKey: idempotencyKey ?? undefined, - triggerVersion: triggerVersion ?? 
undefined, - traceContext, - spanParentAsLink: spanParentAsLink === 1, - }); - - if (!run) { - return json({ error: "Task not found" }, { status: 404 }); - } - - return json( - { - id: run.friendlyId, - }, - { - headers: { - "x-trigger-jwt-claims": JSON.stringify({ - sub: authenticationResult.environment.id, - pub: true, - }), - }, - } - ); - } catch (error) { - if (error instanceof ServiceValidationError) { - return json({ error: error.message }, { status: 422 }); - } else if (error instanceof OutOfEntitlementError) { - return json({ error: error.message }, { status: 422 }); - } else if (error instanceof Error) { - return json({ error: error.message }, { status: 400 }); - } - - return json({ error: "Something went wrong" }, { status: 500 }); + return { + "x-trigger-jwt-claims": claimsHeader, + "x-trigger-jwt": jwt, + }; } + + return { + "x-trigger-jwt-claims": claimsHeader, + }; } + +export { action, loader }; diff --git a/apps/webapp/app/services/authorization.server.ts b/apps/webapp/app/services/authorization.server.ts index 7869b5b0fe..5baff68c31 100644 --- a/apps/webapp/app/services/authorization.server.ts +++ b/apps/webapp/app/services/authorization.server.ts @@ -1,4 +1,4 @@ -export type AuthorizationAction = "read"; // Add more actions as needed +export type AuthorizationAction = "read" | "write"; // Add more actions as needed const ResourceTypes = ["tasks", "tags", "runs", "batch"] as const; diff --git a/apps/webapp/app/services/routeBuiilders/apiBuilder.server.ts b/apps/webapp/app/services/routeBuiilders/apiBuilder.server.ts index a0561cecb3..ffd3bb304e 100644 --- a/apps/webapp/app/services/routeBuiilders/apiBuilder.server.ts +++ b/apps/webapp/app/services/routeBuiilders/apiBuilder.server.ts @@ -1,6 +1,6 @@ import { z } from "zod"; import { ApiAuthenticationResult, authenticateApiRequest } from "../apiAuth.server"; -import { json, LoaderFunctionArgs } from "@remix-run/server-runtime"; +import { ActionFunctionArgs, json, LoaderFunctionArgs } from "@remix-run/server-runtime"; import { fromZodError } from "zod-validation-error"; import { apiCors } from "~/utils/apiCors"; import { @@ -16,10 +16,12 @@ import { type ApiKeyRouteBuilderOptions< TParamsSchema extends z.AnyZodObject | undefined = undefined, - TSearchParamsSchema extends z.AnyZodObject | undefined = undefined + TSearchParamsSchema extends z.AnyZodObject | undefined = undefined, + THeadersSchema extends z.AnyZodObject | undefined = undefined > = { params?: TParamsSchema; searchParams?: TSearchParamsSchema; + headers?: THeadersSchema; allowJWT?: boolean; corsStrategy?: "all" | "none"; authorization?: { @@ -28,7 +30,8 @@ type ApiKeyRouteBuilderOptions< params: TParamsSchema extends z.AnyZodObject ? z.infer : undefined, searchParams: TSearchParamsSchema extends z.AnyZodObject ? z.infer - : undefined + : undefined, + headers: THeadersSchema extends z.AnyZodObject ? z.infer : undefined ) => AuthorizationResources; superScopes?: string[]; }; @@ -36,27 +39,31 @@ type ApiKeyRouteBuilderOptions< type ApiKeyHandlerFunction< TParamsSchema extends z.AnyZodObject | undefined, - TSearchParamsSchema extends z.AnyZodObject | undefined + TSearchParamsSchema extends z.AnyZodObject | undefined, + THeadersSchema extends z.AnyZodObject | undefined = undefined > = (args: { params: TParamsSchema extends z.AnyZodObject ? z.infer : undefined; searchParams: TSearchParamsSchema extends z.AnyZodObject ? z.infer : undefined; + headers: THeadersSchema extends z.AnyZodObject ? 
z.infer : undefined; authentication: ApiAuthenticationResult; request: Request; }) => Promise; export function createLoaderApiRoute< TParamsSchema extends z.AnyZodObject | undefined = undefined, - TSearchParamsSchema extends z.AnyZodObject | undefined = undefined + TSearchParamsSchema extends z.AnyZodObject | undefined = undefined, + THeadersSchema extends z.AnyZodObject | undefined = undefined >( - options: ApiKeyRouteBuilderOptions, - handler: ApiKeyHandlerFunction + options: ApiKeyRouteBuilderOptions, + handler: ApiKeyHandlerFunction ) { return async function loader({ request, params }: LoaderFunctionArgs) { const { params: paramsSchema, searchParams: searchParamsSchema, + headers: headersSchema, allowJWT = false, corsStrategy = "none", authorization, @@ -109,9 +116,26 @@ export function createLoaderApiRoute< parsedSearchParams = parsed.data; } + let parsedHeaders: any = undefined; + if (headersSchema) { + const rawHeaders = Object.fromEntries(request.headers); + const headers = headersSchema.safeParse(rawHeaders); + if (!headers.success) { + return wrapResponse( + request, + json( + { error: "Headers Error", details: fromZodError(headers.error).details }, + { status: 400 } + ), + corsStrategy !== "none" + ); + } + parsedHeaders = headers.data; + } + if (authorization) { const { action, resource, superScopes } = authorization; - const $resource = resource(parsedParams, parsedSearchParams); + const $resource = resource(parsedParams, parsedSearchParams, parsedHeaders); logger.debug("Checking authorization", { action, @@ -133,6 +157,7 @@ export function createLoaderApiRoute< const result = await handler({ params: parsedParams, searchParams: parsedSearchParams, + headers: parsedHeaders, authentication: authenticationResult, request, }); @@ -153,36 +178,42 @@ export function createLoaderApiRoute< type PATRouteBuilderOptions< TParamsSchema extends z.AnyZodObject | undefined = undefined, - TSearchParamsSchema extends z.AnyZodObject | undefined = undefined + TSearchParamsSchema extends z.AnyZodObject | undefined = undefined, + THeadersSchema extends z.AnyZodObject | undefined = undefined > = { params?: TParamsSchema; searchParams?: TSearchParamsSchema; + headers?: THeadersSchema; corsStrategy?: "all" | "none"; }; type PATHandlerFunction< TParamsSchema extends z.AnyZodObject | undefined, - TSearchParamsSchema extends z.AnyZodObject | undefined + TSearchParamsSchema extends z.AnyZodObject | undefined, + THeadersSchema extends z.AnyZodObject | undefined = undefined > = (args: { params: TParamsSchema extends z.AnyZodObject ? z.infer : undefined; searchParams: TSearchParamsSchema extends z.AnyZodObject ? z.infer : undefined; + headers: THeadersSchema extends z.AnyZodObject ? 
z.infer : undefined; authentication: PersonalAccessTokenAuthenticationResult; request: Request; }) => Promise; export function createLoaderPATApiRoute< TParamsSchema extends z.AnyZodObject | undefined = undefined, - TSearchParamsSchema extends z.AnyZodObject | undefined = undefined + TSearchParamsSchema extends z.AnyZodObject | undefined = undefined, + THeadersSchema extends z.AnyZodObject | undefined = undefined >( - options: PATRouteBuilderOptions, - handler: PATHandlerFunction + options: PATRouteBuilderOptions, + handler: PATHandlerFunction ) { return async function loader({ request, params }: LoaderFunctionArgs) { const { params: paramsSchema, searchParams: searchParamsSchema, + headers: headersSchema, corsStrategy = "none", } = options; @@ -233,10 +264,28 @@ export function createLoaderPATApiRoute< parsedSearchParams = parsed.data; } + let parsedHeaders: any = undefined; + if (headersSchema) { + const rawHeaders = Object.fromEntries(request.headers); + const headers = headersSchema.safeParse(rawHeaders); + if (!headers.success) { + return wrapResponse( + request, + json( + { error: "Headers Error", details: fromZodError(headers.error).details }, + { status: 400 } + ), + corsStrategy !== "none" + ); + } + parsedHeaders = headers.data; + } + try { const result = await handler({ params: parsedParams, searchParams: parsedSearchParams, + headers: parsedHeaders, authentication: authenticationResult, request, }); @@ -255,6 +304,207 @@ export function createLoaderPATApiRoute< }; } +type ApiKeyActionRouteBuilderOptions< + TParamsSchema extends z.AnyZodObject | undefined = undefined, + TSearchParamsSchema extends z.AnyZodObject | undefined = undefined, + THeadersSchema extends z.AnyZodObject | undefined = undefined, + TBodySchema extends z.AnyZodObject | undefined = undefined +> = ApiKeyRouteBuilderOptions & { + maxContentLength?: number; + body?: TBodySchema; +}; + +type ApiKeyActionHandlerFunction< + TParamsSchema extends z.AnyZodObject | undefined, + TSearchParamsSchema extends z.AnyZodObject | undefined, + THeadersSchema extends z.AnyZodObject | undefined = undefined, + TBodySchema extends z.AnyZodObject | undefined = undefined +> = (args: { + params: TParamsSchema extends z.AnyZodObject ? z.infer : undefined; + searchParams: TSearchParamsSchema extends z.AnyZodObject + ? z.infer + : undefined; + headers: THeadersSchema extends z.AnyZodObject ? z.infer : undefined; + body: TBodySchema extends z.AnyZodObject ? 
z.infer : undefined; + authentication: ApiAuthenticationResult; + request: Request; +}) => Promise; + +export function createActionApiRoute< + TParamsSchema extends z.AnyZodObject | undefined = undefined, + TSearchParamsSchema extends z.AnyZodObject | undefined = undefined, + THeadersSchema extends z.AnyZodObject | undefined = undefined, + TBodySchema extends z.AnyZodObject | undefined = undefined +>( + options: ApiKeyActionRouteBuilderOptions< + TParamsSchema, + TSearchParamsSchema, + THeadersSchema, + TBodySchema + >, + handler: ApiKeyActionHandlerFunction< + TParamsSchema, + TSearchParamsSchema, + THeadersSchema, + TBodySchema + > +) { + const { + params: paramsSchema, + searchParams: searchParamsSchema, + headers: headersSchema, + body: bodySchema, + allowJWT = false, + corsStrategy = "none", + authorization, + maxContentLength, + } = options; + + async function loader({ request, params }: LoaderFunctionArgs) { + if (corsStrategy !== "none" && request.method.toUpperCase() === "OPTIONS") { + return apiCors(request, json({})); + } + + return new Response(null, { status: 405 }); + } + + async function action({ request, params }: ActionFunctionArgs) { + const authenticationResult = await authenticateApiRequest(request, { allowJWT }); + + if (!authenticationResult) { + return wrapResponse( + request, + json({ error: "Invalid or Missing API key" }, { status: 401 }), + corsStrategy !== "none" + ); + } + + if (maxContentLength) { + const contentLength = request.headers.get("content-length"); + + if (!contentLength || parseInt(contentLength) > maxContentLength) { + return json({ error: "Request body too large" }, { status: 413 }); + } + } + + let parsedParams: any = undefined; + if (paramsSchema) { + const parsed = paramsSchema.safeParse(params); + if (!parsed.success) { + return wrapResponse( + request, + json( + { error: "Params Error", details: fromZodError(parsed.error).details }, + { status: 400 } + ), + corsStrategy !== "none" + ); + } + parsedParams = parsed.data; + } + + let parsedSearchParams: any = undefined; + if (searchParamsSchema) { + const searchParams = Object.fromEntries(new URL(request.url).searchParams); + const parsed = searchParamsSchema.safeParse(searchParams); + if (!parsed.success) { + return wrapResponse( + request, + json( + { error: "Query Error", details: fromZodError(parsed.error).details }, + { status: 400 } + ), + corsStrategy !== "none" + ); + } + parsedSearchParams = parsed.data; + } + + let parsedHeaders: any = undefined; + if (headersSchema) { + const rawHeaders = Object.fromEntries(request.headers); + const headers = headersSchema.safeParse(rawHeaders); + if (!headers.success) { + return wrapResponse( + request, + json( + { error: "Headers Error", details: fromZodError(headers.error).details }, + { status: 400 } + ), + corsStrategy !== "none" + ); + } + parsedHeaders = headers.data; + } + + let parsedBody: any = undefined; + if (bodySchema) { + const rawBody = await request.text(); + if (rawBody.length === 0) { + return wrapResponse( + request, + json({ error: "Request body is empty" }, { status: 400 }), + corsStrategy !== "none" + ); + } + + const body = bodySchema.safeParse(JSON.parse(rawBody)); + if (!body.success) { + return wrapResponse( + request, + json({ error: fromZodError(body.error).toString() }, { status: 400 }), + corsStrategy !== "none" + ); + } + parsedBody = body.data; + } + + if (authorization) { + const { action, resource, superScopes } = authorization; + const $resource = resource(parsedParams, parsedSearchParams, parsedHeaders); + + 
logger.debug("Checking authorization", { + action, + resource: $resource, + superScopes, + scopes: authenticationResult.scopes, + }); + + if (!checkAuthorization(authenticationResult, action, $resource, superScopes)) { + return wrapResponse( + request, + json({ error: "Unauthorized" }, { status: 403 }), + corsStrategy !== "none" + ); + } + } + + try { + const result = await handler({ + params: parsedParams, + searchParams: parsedSearchParams, + headers: parsedHeaders, + body: parsedBody, + authentication: authenticationResult, + request, + }); + return wrapResponse(request, result, corsStrategy !== "none"); + } catch (error) { + console.error("Error in API route:", error); + if (error instanceof Response) { + return wrapResponse(request, error, corsStrategy !== "none"); + } + return wrapResponse( + request, + json({ error: "Internal Server Error" }, { status: 500 }), + corsStrategy !== "none" + ); + } + } + + return { loader, action }; +} + function wrapResponse(request: Request, response: Response, useCors: boolean) { return useCors ? apiCors(request, response) : response; } diff --git a/packages/core/src/v3/apiClient/index.ts b/packages/core/src/v3/apiClient/index.ts index 09275c0e1f..445d54353f 100644 --- a/packages/core/src/v3/apiClient/index.ts +++ b/packages/core/src/v3/apiClient/index.ts @@ -182,6 +182,15 @@ export class ApiClient { ) .withResponse() .then(async ({ response, data }) => { + const jwtHeader = response.headers.get("x-trigger-jwt"); + + if (typeof jwtHeader === "string") { + return { + ...data, + publicAccessToken: jwtHeader, + }; + } + const claimsHeader = response.headers.get("x-trigger-jwt-claims"); const claims = claimsHeader ? JSON.parse(claimsHeader) : undefined; @@ -653,6 +662,10 @@ export class ApiClient { } } + if (typeof window !== "undefined" && typeof window.document !== "undefined") { + headers["x-trigger-client"] = "browser"; + } + return headers; } diff --git a/packages/core/src/v3/idempotencyKeys.ts b/packages/core/src/v3/idempotencyKeys.ts new file mode 100644 index 0000000000..7a8e053018 --- /dev/null +++ b/packages/core/src/v3/idempotencyKeys.ts @@ -0,0 +1,100 @@ +import { taskContext } from "./task-context-api.js"; +import { IdempotencyKey } from "./types/idempotencyKeys.js"; + +export function isIdempotencyKey( + value: string | string[] | IdempotencyKey +): value is IdempotencyKey { + // Cannot check the brand at runtime because it doesn't exist (it's a TypeScript-only construct) + return typeof value === "string" && value.length === 64; +} + +export async function makeIdempotencyKey( + idempotencyKey?: IdempotencyKey | string | string[] +): Promise { + if (!idempotencyKey) { + return; + } + + if (isIdempotencyKey(idempotencyKey)) { + return idempotencyKey; + } + + return await createIdempotencyKey(idempotencyKey, { scope: "global" }); +} + +/** + * Creates a deterministic idempotency key based on the provided key material. + * + * If running inside a task, the task run ID is automatically included in the key material, giving you a unique key per task run. + * This ensures that a given child task is only triggered once per task run, even if the parent task is retried. + * + * @param {string | string[]} key The key material to create the idempotency key from. + * @param {object} [options] Additional options. + * @param {"run" | "attempt" | "global"} [options.scope="run"] The scope of the idempotency key. + * + * @returns {Promise} The idempotency key as a branded string. 
+ * + * @example + * + * ```typescript + * import { idempotencyKeys, task } from "@trigger.dev/sdk/v3"; + * + * export const myTask = task({ + * id: "my-task", + * run: async (payload: any) => { + * const idempotencyKey = await idempotencyKeys.create("my-task-key"); + * + * // Use the idempotency key when triggering child tasks + * await childTask.triggerAndWait(payload, { idempotencyKey }); + * } + * }); + * ``` + * + * You can also use the `scope` parameter to create a key that is unique per task run, task run attempts (retries of the same run), or globally: + * + * ```typescript + * await idempotencyKeys.create("my-task-key", { scope: "attempt" }); // Creates a key that is unique per task run attempt + * await idempotencyKeys.create("my-task-key", { scope: "global" }); // Skips including the task run ID + * ``` + */ +export async function createIdempotencyKey( + key: string | string[], + options?: { scope?: "run" | "attempt" | "global" } +): Promise { + const idempotencyKey = await generateIdempotencyKey( + [...(Array.isArray(key) ? key : [key])].concat(injectScope(options?.scope ?? "run")) + ); + + return idempotencyKey as IdempotencyKey; +} + +function injectScope(scope: "run" | "attempt" | "global"): string[] { + switch (scope) { + case "run": { + if (taskContext?.ctx) { + return [taskContext.ctx.run.id]; + } + break; + } + case "attempt": { + if (taskContext?.ctx) { + return [taskContext.ctx.attempt.id]; + } + break; + } + } + + return []; +} + +async function generateIdempotencyKey(keyMaterial: string[]) { + const hash = await crypto.subtle.digest( + "SHA-256", + new TextEncoder().encode(keyMaterial.join("-")) + ); + + // Return a hex string, using cross-runtime compatible methods + return Array.from(new Uint8Array(hash)) + .map((byte) => byte.toString(16).padStart(2, "0")) + .join(""); +} diff --git a/packages/core/src/v3/index.ts b/packages/core/src/v3/index.ts index 17cba85590..12fbc8b2d2 100644 --- a/packages/core/src/v3/index.ts +++ b/packages/core/src/v3/index.ts @@ -20,6 +20,7 @@ export * from "./task-catalog-api.js"; export * from "./types/index.js"; export { links } from "./links.js"; export * from "./jwt.js"; +export * from "./idempotencyKeys.js"; export { formatDuration, formatDurationInDays, diff --git a/packages/core/src/v3/types/tasks.ts b/packages/core/src/v3/types/tasks.ts index dd507508f0..0039c4987d 100644 --- a/packages/core/src/v3/types/tasks.ts +++ b/packages/core/src/v3/types/tasks.ts @@ -418,9 +418,7 @@ export type BatchResult = { runs: TaskRunResult[]; }; -export type BatchItem = TInput extends void - ? 
{ payload?: TInput; options?: TaskRunOptions } - : { payload: TInput; options?: TaskRunOptions }; +export type BatchItem = { payload: TInput; options?: TaskRunOptions }; export interface Task { /** diff --git a/packages/core/src/v3/utils/ioSerialization.ts b/packages/core/src/v3/utils/ioSerialization.ts index b1a8d4587f..aa623d6d57 100644 --- a/packages/core/src/v3/utils/ioSerialization.ts +++ b/packages/core/src/v3/utils/ioSerialization.ts @@ -403,7 +403,7 @@ async function loadSuperJSON() { superjson.registerCustom( { - isApplicable: (v): v is Buffer => v instanceof Buffer, + isApplicable: (v): v is Buffer => typeof Buffer === "function" && Buffer.isBuffer(v), serialize: (v) => [...v], deserialize: (v) => Buffer.from(v), }, diff --git a/packages/react-hooks/src/hooks/useTaskTrigger.ts b/packages/react-hooks/src/hooks/useTaskTrigger.ts new file mode 100644 index 0000000000..f08a7130bc --- /dev/null +++ b/packages/react-hooks/src/hooks/useTaskTrigger.ts @@ -0,0 +1,64 @@ +"use client"; + +import { + type AnyTask, + type TaskIdentifier, + type TaskPayload, + InferRunTypes, + makeIdempotencyKey, + RunHandleFromTypes, + stringifyIO, + TaskRunOptions, +} from "@trigger.dev/core/v3"; +import useSWRMutation from "swr/mutation"; +import { useApiClient } from "./useApiClient.js"; + +export interface TriggerInstance { + submit: (payload: TaskPayload) => void; + isLoading: boolean; + handle?: RunHandleFromTypes>; +} + +export function useTaskTrigger( + id: TaskIdentifier +): TriggerInstance { + const apiClient = useApiClient(); + + async function triggerTask( + id: string, + { + arg: { payload, options }, + }: { arg: { payload: TaskPayload; options?: TaskRunOptions } } + ) { + const payloadPacket = await stringifyIO(payload); + + const handle = await apiClient.triggerTask(id, { + payload: payloadPacket.data, + options: { + queue: options?.queue, + concurrencyKey: options?.concurrencyKey, + payloadType: payloadPacket.dataType, + idempotencyKey: await makeIdempotencyKey(options?.idempotencyKey), + delay: options?.delay, + ttl: options?.ttl, + tags: options?.tags, + maxAttempts: options?.maxAttempts, + metadata: options?.metadata, + maxDuration: options?.maxDuration, + }, + }); + + return { ...handle, taskIdentifier: id }; + } + + const mutation = useSWRMutation(id as string, triggerTask); + + return { + submit: (payload) => { + // trigger the task with the given payload + mutation.trigger({ payload }); + }, + isLoading: mutation.isMutating, + handle: mutation.data as RunHandleFromTypes>, + }; +} diff --git a/packages/react-hooks/src/index.ts b/packages/react-hooks/src/index.ts index 7a0d0043de..560fb37660 100644 --- a/packages/react-hooks/src/index.ts +++ b/packages/react-hooks/src/index.ts @@ -4,3 +4,4 @@ export * from "./hooks/useRun.js"; export * from "./hooks/useRealtimeRun.js"; export * from "./hooks/useRealtimeRunsWithTag.js"; export * from "./hooks/useRealtimeBatch.js"; +export * from "./hooks/useTaskTrigger.js"; diff --git a/packages/trigger-sdk/src/v3/auth.ts b/packages/trigger-sdk/src/v3/auth.ts index 687d64c563..3bf027ea44 100644 --- a/packages/trigger-sdk/src/v3/auth.ts +++ b/packages/trigger-sdk/src/v3/auth.ts @@ -28,7 +28,7 @@ export const auth = { withAuth, }; -type PublicTokenPermissionAction = "read"; // Add more actions as needed +type PublicTokenPermissionAction = "read" | "write"; // Add more actions as needed type PublicTokenPermissionProperties = { /** diff --git a/packages/trigger-sdk/src/v3/idempotencyKeys.ts b/packages/trigger-sdk/src/v3/idempotencyKeys.ts index 
8322e620bf..87e3be03d8 100644 --- a/packages/trigger-sdk/src/v3/idempotencyKeys.ts +++ b/packages/trigger-sdk/src/v3/idempotencyKeys.ts @@ -1,91 +1,7 @@ -import { type IdempotencyKey, taskContext } from "@trigger.dev/core/v3"; +import { createIdempotencyKey, type IdempotencyKey } from "@trigger.dev/core/v3"; export const idempotencyKeys = { create: createIdempotencyKey, }; export type { IdempotencyKey }; - -export function isIdempotencyKey( - value: string | string[] | IdempotencyKey -): value is IdempotencyKey { - // Cannot check the brand at runtime because it doesn't exist (it's a TypeScript-only construct) - return typeof value === "string" && value.length === 64; -} - -/** - * Creates a deterministic idempotency key based on the provided key material. - * - * If running inside a task, the task run ID is automatically included in the key material, giving you a unique key per task run. - * This ensures that a given child task is only triggered once per task run, even if the parent task is retried. - * - * @param {string | string[]} key The key material to create the idempotency key from. - * @param {object} [options] Additional options. - * @param {"run" | "attempt" | "global"} [options.scope="run"] The scope of the idempotency key. - * - * @returns {Promise} The idempotency key as a branded string. - * - * @example - * - * ```typescript - * import { idempotencyKeys, task } from "@trigger.dev/sdk/v3"; - * - * export const myTask = task({ - * id: "my-task", - * run: async (payload: any) => { - * const idempotencyKey = await idempotencyKeys.create("my-task-key"); - * - * // Use the idempotency key when triggering child tasks - * await childTask.triggerAndWait(payload, { idempotencyKey }); - * } - * }); - * ``` - * - * You can also use the `scope` parameter to create a key that is unique per task run, task run attempts (retries of the same run), or globally: - * - * ```typescript - * await idempotencyKeys.create("my-task-key", { scope: "attempt" }); // Creates a key that is unique per task run attempt - * await idempotencyKeys.create("my-task-key", { scope: "global" }); // Skips including the task run ID - * ``` - */ -async function createIdempotencyKey( - key: string | string[], - options?: { scope?: "run" | "attempt" | "global" } -): Promise { - const idempotencyKey = await generateIdempotencyKey( - [...(Array.isArray(key) ? key : [key])].concat(injectScope(options?.scope ?? 
"run")) - ); - - return idempotencyKey as IdempotencyKey; -} - -function injectScope(scope: "run" | "attempt" | "global"): string[] { - switch (scope) { - case "run": { - if (taskContext?.ctx) { - return [taskContext.ctx.run.id]; - } - break; - } - case "attempt": { - if (taskContext?.ctx) { - return [taskContext.ctx.attempt.id]; - } - break; - } - } - - return []; -} - -async function generateIdempotencyKey(keyMaterial: string[]) { - const hash = await crypto.subtle.digest( - "SHA-256", - new TextEncoder().encode(keyMaterial.join("-")) - ); - - // Return a hex string, using cross-runtime compatible methods - return Array.from(new Uint8Array(hash)) - .map((byte) => byte.toString(16).padStart(2, "0")) - .join(""); -} diff --git a/packages/trigger-sdk/src/v3/shared.ts b/packages/trigger-sdk/src/v3/shared.ts index 28afc7e38b..31185de19d 100644 --- a/packages/trigger-sdk/src/v3/shared.ts +++ b/packages/trigger-sdk/src/v3/shared.ts @@ -17,6 +17,7 @@ import { getSchemaParseFn, InitOutput, logger, + makeIdempotencyKey, parsePacket, Queue, QueueOptions, @@ -30,10 +31,8 @@ import { TaskRunExecutionResult, TaskRunPromise, } from "@trigger.dev/core/v3"; -import { IdempotencyKey, idempotencyKeys, isIdempotencyKey } from "./idempotencyKeys.js"; import { PollOptions, runs } from "./runs.js"; import { tracer } from "./tracer.js"; -import type { Schema as AISchema, CoreTool } from "ai"; import type { AnyRunHandle, @@ -534,7 +533,7 @@ async function trigger_internal( concurrencyKey: options?.concurrencyKey, test: taskContext.ctx?.run.isTest, payloadType: payloadPacket.dataType, - idempotencyKey: await makeKey(options?.idempotencyKey), + idempotencyKey: await makeIdempotencyKey(options?.idempotencyKey), delay: options?.delay, ttl: options?.ttl, tags: options?.tags, @@ -597,7 +596,7 @@ async function batchTrigger_internal( concurrencyKey: item.options?.concurrencyKey, test: taskContext.ctx?.run.isTest, payloadType: payloadPacket.dataType, - idempotencyKey: await makeKey(item.options?.idempotencyKey), + idempotencyKey: await makeIdempotencyKey(item.options?.idempotencyKey), delay: item.options?.delay, ttl: item.options?.ttl, tags: item.options?.tags, @@ -667,7 +666,7 @@ async function triggerAndWait_internal( concurrencyKey: options?.concurrencyKey, test: taskContext.ctx?.run.isTest, payloadType: payloadPacket.dataType, - idempotencyKey: await makeKey(options?.idempotencyKey), + idempotencyKey: await makeIdempotencyKey(options?.idempotencyKey), delay: options?.delay, ttl: options?.ttl, tags: options?.tags, @@ -764,7 +763,7 @@ async function batchTriggerAndWait_internal( concurrencyKey: item.options?.concurrencyKey, test: taskContext.ctx?.run.isTest, payloadType: payloadPacket.dataType, - idempotencyKey: await makeKey(item.options?.idempotencyKey), + idempotencyKey: await makeIdempotencyKey(item.options?.idempotencyKey), delay: item.options?.delay, ttl: item.options?.ttl, tags: item.options?.tags, @@ -929,17 +928,3 @@ async function handleTaskRunExecutionResult( }; } } - -async function makeKey( - idempotencyKey?: IdempotencyKey | string | string[] -): Promise { - if (!idempotencyKey) { - return; - } - - if (isIdempotencyKey(idempotencyKey)) { - return idempotencyKey; - } - - return await idempotencyKeys.create(idempotencyKey, { scope: "global" }); -} diff --git a/references/nextjs-realtime/src/app/page.tsx b/references/nextjs-realtime/src/app/page.tsx index 498712adbc..9c06bd95d3 100644 --- a/references/nextjs-realtime/src/app/page.tsx +++ b/references/nextjs-realtime/src/app/page.tsx @@ -1,8 +1,18 @@ import 
RunButton from "@/components/RunButton"; import BatchRunButton from "@/components/BatchRunButton"; +import TriggerButton from "@/components/TriggerButton"; import { ImageUploadDropzone } from "@/components/ImageUploadButton"; +import { auth } from "@trigger.dev/sdk/v3"; + +export default async function Home() { + const publicAccessToken = await auth.createPublicToken({ + scopes: { + write: { + tasks: ["openai-streaming"], + }, + }, + }); -export default function Home() { return (
@@ -14,6 +24,7 @@ export default function Home() {
+        <TriggerButton publicAccessToken={publicAccessToken} />
); diff --git a/references/nextjs-realtime/src/components/TriggerButton.tsx b/references/nextjs-realtime/src/components/TriggerButton.tsx new file mode 100644 index 0000000000..c0d1d56885 --- /dev/null +++ b/references/nextjs-realtime/src/components/TriggerButton.tsx @@ -0,0 +1,41 @@ +"use client"; + +import { Button } from "@/components/ui/button"; +import { type openaiStreaming } from "@/trigger/ai"; +import { TriggerAuthContext, useTaskTrigger } from "@trigger.dev/react-hooks"; + +function TriggerButton() { + const { submit, handle, isLoading } = useTaskTrigger("openai-streaming"); + + console.log(handle); + + return ( + + ); +} + +export default function TriggerButtonClientWrapper({ + publicAccessToken, +}: { + publicAccessToken: string; +}) { + return ( + + + + ); +} From a02ad730b352a64d1df892fd2174817ad43036cc Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Thu, 14 Nov 2024 11:50:42 +0000 Subject: [PATCH 11/31] Add streaming support for the realtime react hooks --- .../routes/api.v1.tasks.$taskId.trigger.ts | 4 +- apps/webapp/app/services/apiAuth.server.ts | 1 + .../routeBuiilders/apiBuilder.server.ts | 4 +- apps/webapp/app/utils/apiCors.ts | 1 + packages/core/src/v3/apiClient/index.ts | 25 +- packages/core/src/v3/apiClient/runStream.ts | 10 +- packages/react-hooks/package.json | 2 +- packages/react-hooks/src/contexts.tsx | 4 +- .../react-hooks/src/hooks/useApiClient.ts | 28 +- packages/react-hooks/src/hooks/useRealtime.ts | 507 ++++++++++++++++++ .../react-hooks/src/hooks/useRealtimeBatch.ts | 66 --- .../react-hooks/src/hooks/useRealtimeRun.ts | 46 -- .../src/hooks/useRealtimeRunsWithTag.ts | 58 -- packages/react-hooks/src/index.ts | 4 +- packages/react-hooks/src/utils/throttle.ts | 5 + packages/trigger-sdk/src/v3/index.ts | 1 + packages/trigger-sdk/src/v3/runs.ts | 11 +- pnpm-lock.yaml | 3 + .../src/app/ai/[id]/ClientAiDetails.tsx | 84 +++ .../nextjs-realtime/src/app/ai/[id]/page.tsx | 15 + .../src/components/TriggerButton.tsx | 12 +- .../src/components/UploadImageDisplay.tsx | 2 +- references/nextjs-realtime/src/trigger/ai.ts | 14 +- 23 files changed, 704 insertions(+), 203 deletions(-) create mode 100644 packages/react-hooks/src/hooks/useRealtime.ts delete mode 100644 packages/react-hooks/src/hooks/useRealtimeBatch.ts delete mode 100644 packages/react-hooks/src/hooks/useRealtimeRun.ts delete mode 100644 packages/react-hooks/src/hooks/useRealtimeRunsWithTag.ts create mode 100644 packages/react-hooks/src/utils/throttle.ts create mode 100644 references/nextjs-realtime/src/app/ai/[id]/ClientAiDetails.tsx create mode 100644 references/nextjs-realtime/src/app/ai/[id]/page.tsx diff --git a/apps/webapp/app/routes/api.v1.tasks.$taskId.trigger.ts b/apps/webapp/app/routes/api.v1.tasks.$taskId.trigger.ts index a32078b2a6..8586751062 100644 --- a/apps/webapp/app/routes/api.v1.tasks.$taskId.trigger.ts +++ b/apps/webapp/app/routes/api.v1.tasks.$taskId.trigger.ts @@ -111,13 +111,13 @@ async function responseHeaders( triggerClient?: string | null ): Promise> { const claimsHeader = JSON.stringify({ - sub: run.runtimeEnvironmentId, + sub: environment.id, pub: true, }); if (triggerClient === "browser") { const claims = { - sub: run.runtimeEnvironmentId, + sub: environment.id, pub: true, scopes: [`read:runs:${run.friendlyId}`], }; diff --git a/apps/webapp/app/services/apiAuth.server.ts b/apps/webapp/app/services/apiAuth.server.ts index 4e4917ce65..ecf1d022db 100644 --- a/apps/webapp/app/services/apiAuth.server.ts +++ b/apps/webapp/app/services/apiAuth.server.ts @@ -41,6 +41,7 @@ export async 
function authenticateApiRequest( options: { allowPublicKey?: boolean; allowJWT?: boolean } = {} ): Promise { const apiKey = getApiKeyFromRequest(request); + if (!apiKey) { return; } diff --git a/apps/webapp/app/services/routeBuiilders/apiBuilder.server.ts b/apps/webapp/app/services/routeBuiilders/apiBuilder.server.ts index ffd3bb304e..f278ecba3f 100644 --- a/apps/webapp/app/services/routeBuiilders/apiBuilder.server.ts +++ b/apps/webapp/app/services/routeBuiilders/apiBuilder.server.ts @@ -506,5 +506,7 @@ export function createActionApiRoute< } function wrapResponse(request: Request, response: Response, useCors: boolean) { - return useCors ? apiCors(request, response) : response; + return useCors + ? apiCors(request, response, { exposedHeaders: ["x-trigger-jwt", "x-trigger-jwt-claims"] }) + : response; } diff --git a/apps/webapp/app/utils/apiCors.ts b/apps/webapp/app/utils/apiCors.ts index a75322b37a..fc07fadcc2 100644 --- a/apps/webapp/app/utils/apiCors.ts +++ b/apps/webapp/app/utils/apiCors.ts @@ -8,6 +8,7 @@ type CorsOptions = { maxAge?: number; origin?: boolean | string; credentials?: boolean; + exposedHeaders?: string[]; }; export async function apiCors( diff --git a/packages/core/src/v3/apiClient/index.ts b/packages/core/src/v3/apiClient/index.ts index 445d54353f..76eaafadd5 100644 --- a/packages/core/src/v3/apiClient/index.ts +++ b/packages/core/src/v3/apiClient/index.ts @@ -45,6 +45,7 @@ import { RunStreamCallback, RunSubscription, TaskRunShape, + RealtimeRun, } from "./runStream.js"; import { CreateEnvironmentVariableParams, @@ -88,7 +89,14 @@ const DEFAULT_ZOD_FETCH_OPTIONS: ZodFetchOptions = { export { isRequestOptions }; export type { ApiRequestOptions }; -export type { RunShape, AnyRunShape, TaskRunShape, RunStreamCallback, RunSubscription }; +export type { + RunShape, + AnyRunShape, + TaskRunShape, + RealtimeRun, + RunStreamCallback, + RunSubscription, +}; /** * Trigger.dev v3 API client @@ -603,15 +611,19 @@ export class ApiClient { ); } - subscribeToRun(runId: string) { + subscribeToRun(runId: string, options?: { signal?: AbortSignal }) { return runShapeStream(`${this.baseUrl}/realtime/v1/runs/${runId}`, { closeOnComplete: true, headers: this.#getRealtimeHeaders(), client: this, + signal: options?.signal, }); } - subscribeToRunsWithTag(tag: string | string[]) { + subscribeToRunsWithTag( + tag: string | string[], + options?: { signal?: AbortSignal } + ) { const searchParams = createSearchQueryForSubscribeToRuns({ tags: tag, }); @@ -622,15 +634,20 @@ export class ApiClient { closeOnComplete: false, headers: this.#getRealtimeHeaders(), client: this, + signal: options?.signal, } ); } - subscribeToBatch(batchId: string) { + subscribeToBatch( + batchId: string, + options?: { signal?: AbortSignal } + ) { return runShapeStream(`${this.baseUrl}/realtime/v1/batches/${batchId}`, { closeOnComplete: false, headers: this.#getRealtimeHeaders(), client: this, + signal: options?.signal, }); } diff --git a/packages/core/src/v3/apiClient/runStream.ts b/packages/core/src/v3/apiClient/runStream.ts index 3ecd60c64b..58302e1e32 100644 --- a/packages/core/src/v3/apiClient/runStream.ts +++ b/packages/core/src/v3/apiClient/runStream.ts @@ -42,6 +42,7 @@ export type RunShape = TRunTypes extends AnyRunTy export type AnyRunShape = RunShape; export type TaskRunShape = RunShape>; +export type RealtimeRun = TaskRunShape; export type RunStreamCallback = ( run: RunShape @@ -99,7 +100,7 @@ export interface StreamSubscription { } export interface StreamSubscriptionFactory { - createSubscription(runId: string, 
streamKey: string): StreamSubscription; + createSubscription(runId: string, streamKey: string, baseUrl?: string): StreamSubscription; } // Real implementation for production @@ -145,8 +146,8 @@ export class SSEStreamSubscriptionFactory implements StreamSubscriptionFactory { private options: { headers?: Record; signal?: AbortSignal } ) {} - createSubscription(runId: string, streamKey: string): StreamSubscription { - const url = `${this.baseUrl}/realtime/v1/streams/${runId}/${streamKey}`; + createSubscription(runId: string, streamKey: string, baseUrl?: string): StreamSubscription { + const url = `${baseUrl ?? this.baseUrl}/realtime/v1/streams/${runId}/${streamKey}`; return new SSEStreamSubscription(url, this.options); } } @@ -243,7 +244,8 @@ export class RunSubscription { const subscription = this.options.streamFactory.createSubscription( run.id, - streamKey.toString() + streamKey.toString(), + this.options.client?.baseUrl ); await subscription.subscribe(async (chunk) => { diff --git a/packages/react-hooks/package.json b/packages/react-hooks/package.json index ab5de1622a..d6050309a0 100644 --- a/packages/react-hooks/package.json +++ b/packages/react-hooks/package.json @@ -74,4 +74,4 @@ "main": "./dist/commonjs/index.js", "types": "./dist/commonjs/index.d.ts", "module": "./dist/esm/index.js" -} +} \ No newline at end of file diff --git a/packages/react-hooks/src/contexts.tsx b/packages/react-hooks/src/contexts.tsx index c4e14e68a2..82b958b6c1 100644 --- a/packages/react-hooks/src/contexts.tsx +++ b/packages/react-hooks/src/contexts.tsx @@ -4,7 +4,7 @@ import React from "react"; import { createContextAndHook } from "./utils/createContextAndHook.js"; import type { ApiClientConfiguration } from "@trigger.dev/core/v3"; -const [TriggerAuthContext, useTriggerAuthContext] = +const [TriggerAuthContext, useTriggerAuthContext, useTriggerAuthContextOptional] = createContextAndHook("TriggerAuthContext"); -export { TriggerAuthContext, useTriggerAuthContext }; +export { TriggerAuthContext, useTriggerAuthContext, useTriggerAuthContextOptional }; diff --git a/packages/react-hooks/src/hooks/useApiClient.ts b/packages/react-hooks/src/hooks/useApiClient.ts index 4ecf638b14..94f4020baf 100644 --- a/packages/react-hooks/src/hooks/useApiClient.ts +++ b/packages/react-hooks/src/hooks/useApiClient.ts @@ -1,16 +1,28 @@ "use client"; -import { ApiClient } from "@trigger.dev/core/v3"; -import { useTriggerAuthContext } from "../contexts.js"; +import { ApiClient, ApiRequestOptions } from "@trigger.dev/core/v3"; +import { useTriggerAuthContextOptional } from "../contexts.js"; -export function useApiClient() { - const auth = useTriggerAuthContext(); +export type UseApiClientOptions = { + accessToken?: string; + baseURL?: string; + requestOptions?: ApiRequestOptions; +}; - const baseUrl = auth.baseURL ?? "https://api.trigger.dev"; +export function useApiClient(options?: UseApiClientOptions): ApiClient { + const auth = useTriggerAuthContextOptional(); - if (!auth.accessToken) { - throw new Error("Missing accessToken in TriggerAuthContext"); + const baseUrl = auth?.baseURL ?? options?.baseURL ?? "https://api.trigger.dev"; + const accessToken = auth?.accessToken ?? 
options?.accessToken; + + if (!accessToken) { + throw new Error("Missing accessToken in TriggerAuthContext or useApiClient options"); } - return new ApiClient(baseUrl, auth.accessToken, auth.requestOptions); + const requestOptions: ApiRequestOptions = { + ...auth?.requestOptions, + ...options?.requestOptions, + }; + + return new ApiClient(baseUrl, accessToken, requestOptions); } diff --git a/packages/react-hooks/src/hooks/useRealtime.ts b/packages/react-hooks/src/hooks/useRealtime.ts new file mode 100644 index 0000000000..7bca7f1f83 --- /dev/null +++ b/packages/react-hooks/src/hooks/useRealtime.ts @@ -0,0 +1,507 @@ +"use client"; + +import { AnyTask, ApiClient, InferRunTypes, RealtimeRun } from "@trigger.dev/core/v3"; +import { useCallback, useEffect, useId, useRef, useState } from "react"; +import { throttle } from "../utils/throttle.js"; +import { KeyedMutator, useSWR } from "../utils/trigger-swr.js"; +import { useApiClient, UseApiClientOptions } from "./useApiClient.js"; + +export type UseRealtimeRunOptions = UseApiClientOptions & { + id?: string; + enabled?: boolean; + throttleInMs?: number; +}; + +export type UseRealtimeRunInstance = { + run: RealtimeRun | undefined; + + error: Error | undefined; + + /** + * Abort the current request immediately. + */ + stop: () => void; +}; + +/** + * hook to subscribe to realtime updates of a task run. + * + * @template TTask - The type of the task. + * @param {string} runId - The unique identifier of the run to subscribe to. + * @returns {{ run: RealtimeRun | undefined, error: Error | null }} An object containing the current state of the run and any error encountered. + * + * @example + * ```ts + * import type { myTask } from './path/to/task'; + * const { run, error } = useRealtimeRun('run-id-123'); + * ``` + */ +export function useRealtimeRun( + runId: string, + options?: UseRealtimeRunOptions +): UseRealtimeRunInstance { + const hookId = useId(); + const idKey = options?.id ?? hookId; + + // Store the streams state in SWR, using the idKey as the key to share states. + const { data: run, mutate: mutateRun } = useSWR>([idKey, "run"], null); + + // Keep the latest streams in a ref. + const runRef = useRef | undefined>(); + useEffect(() => { + runRef.current = run; + }, [run]); + + const { data: error = undefined, mutate: setError } = useSWR( + [idKey, "error"], + null + ); + + // Abort controller to cancel the current API call. + const abortControllerRef = useRef(null); + + const stop = useCallback(() => { + if (abortControllerRef.current) { + abortControllerRef.current.abort(); + abortControllerRef.current = null; + } + }, []); + + const apiClient = useApiClient(options); + + const triggerRequest = useCallback(async () => { + try { + const abortController = new AbortController(); + abortControllerRef.current = abortController; + + await processRealtimeRun( + runId, + apiClient, + throttle(mutateRun, options?.throttleInMs), + abortControllerRef + ); + } catch (err) { + // Ignore abort errors as they are expected. 
+ if ((err as any).name === "AbortError") { + abortControllerRef.current = null; + return; + } + + setError(err as Error); + } + }, [runId, mutateRun, abortControllerRef, apiClient, setError]); + + useEffect(() => { + if (typeof options?.enabled === "boolean" && !options.enabled) { + return; + } + + triggerRequest().finally(() => {}); + + return () => { + stop(); + }; + }, [runId, stop, options?.enabled]); + + return { run, error, stop }; +} + +export type StreamResults> = { + [K in keyof TStreams]: Array; +}; + +export type UseRealtimeRunWithStreamsInstance< + TTask extends AnyTask = AnyTask, + TStreams extends Record = Record, +> = { + run: RealtimeRun | undefined; + + streams: StreamResults; + + error: Error | undefined; + + /** + * Abort the current request immediately, keep the generated tokens if any. + */ + stop: () => void; +}; + +export function useRealtimeRunWithStreams< + TTask extends AnyTask = AnyTask, + TStreams extends Record = Record, +>( + runId: string, + options?: UseRealtimeRunOptions +): UseRealtimeRunWithStreamsInstance { + const hookId = useId(); + const idKey = options?.id ?? hookId; + + const [initialStreamsFallback] = useState({} as StreamResults); + + // Store the streams state in SWR, using the idKey as the key to share states. + const { data: streams, mutate: mutateStreams } = useSWR>( + [idKey, "streams"], + null, + { + fallbackData: initialStreamsFallback, + } + ); + + // Keep the latest streams in a ref. + const streamsRef = useRef>(streams ?? ({} as StreamResults)); + useEffect(() => { + streamsRef.current = streams || ({} as StreamResults); + }, [streams]); + + // Store the streams state in SWR, using the idKey as the key to share states. + const { data: run, mutate: mutateRun } = useSWR>([idKey, "run"], null); + + // Keep the latest streams in a ref. + const runRef = useRef | undefined>(); + useEffect(() => { + runRef.current = run; + }, [run]); + + const { data: error = undefined, mutate: setError } = useSWR( + [idKey, "error"], + null + ); + + // Abort controller to cancel the current API call. + const abortControllerRef = useRef(null); + + const stop = useCallback(() => { + if (abortControllerRef.current) { + abortControllerRef.current.abort(); + abortControllerRef.current = null; + } + }, []); + + const apiClient = useApiClient(options); + + const triggerRequest = useCallback(async () => { + try { + const abortController = new AbortController(); + abortControllerRef.current = abortController; + + await processRealtimeRunWithStreams( + runId, + apiClient, + throttle(mutateRun, options?.throttleInMs), + throttle(mutateStreams, options?.throttleInMs), + streamsRef, + abortControllerRef + ); + } catch (err) { + // Ignore abort errors as they are expected. + if ((err as any).name === "AbortError") { + abortControllerRef.current = null; + return; + } + + setError(err as Error); + } + }, [runId, mutateRun, mutateStreams, streamsRef, abortControllerRef, apiClient, setError]); + + useEffect(() => { + if (typeof options?.enabled === "boolean" && !options.enabled) { + return; + } + + triggerRequest().finally(() => {}); + + return () => { + stop(); + }; + }, [runId, stop, options?.enabled]); + + return { run, streams: streams ?? initialStreamsFallback, error, stop }; +} + +export type UseRealtimeRunsInstance = { + runs: RealtimeRun[]; + + error: Error | undefined; + + /** + * Abort the current request immediately. 
+ */ + stop: () => void; +}; + +export function useRealtimeRunsWithTag( + tag: string | string[], + options?: UseRealtimeRunOptions +): UseRealtimeRunsInstance { + const hookId = useId(); + const idKey = options?.id ?? hookId; + + // Store the streams state in SWR, using the idKey as the key to share states. + const { data: runs, mutate: mutateRuns } = useSWR[]>([idKey, "run"], null, { + fallbackData: [], + }); + + // Keep the latest streams in a ref. + const runsRef = useRef[]>([]); + useEffect(() => { + runsRef.current = runs ?? []; + }, [runs]); + + const { data: error = undefined, mutate: setError } = useSWR( + [idKey, "error"], + null + ); + + // Abort controller to cancel the current API call. + const abortControllerRef = useRef(null); + + const stop = useCallback(() => { + if (abortControllerRef.current) { + abortControllerRef.current.abort(); + abortControllerRef.current = null; + } + }, []); + + const apiClient = useApiClient(options); + + const triggerRequest = useCallback(async () => { + try { + const abortController = new AbortController(); + abortControllerRef.current = abortController; + + await processRealtimeRunsWithTag( + tag, + apiClient, + throttle(mutateRuns, options?.throttleInMs), + runsRef, + abortControllerRef + ); + } catch (err) { + // Ignore abort errors as they are expected. + if ((err as any).name === "AbortError") { + abortControllerRef.current = null; + return; + } + + setError(err as Error); + } + }, [tag, mutateRuns, runsRef, abortControllerRef, apiClient, setError]); + + useEffect(() => { + if (typeof options?.enabled === "boolean" && !options.enabled) { + return; + } + + triggerRequest().finally(() => {}); + + return () => { + stop(); + }; + }, [tag, stop, options?.enabled]); + + return { runs: runs ?? [], error, stop }; +} + +/** + * hook to subscribe to realtime updates of a batch of task runs. + * + * @template TTask - The type of the task. + * @param {string} batchId - The unique identifier of the batch to subscribe to. + * @returns {{ runs: RealtimeRun[], error: Error | null }} An object containing the current state of the runs and any error encountered. + * + * @example + * + * ```ts + * import type { myTask } from './path/to/task'; + * const { runs, error } = useRealtimeBatch('batch-id-123'); + * ``` + */ +export function useRealtimeBatch( + batchId: string, + options?: UseRealtimeRunOptions +): UseRealtimeRunsInstance { + const hookId = useId(); + const idKey = options?.id ?? hookId; + + // Store the streams state in SWR, using the idKey as the key to share states. + const { data: runs, mutate: mutateRuns } = useSWR[]>([idKey, "run"], null, { + fallbackData: [], + }); + + // Keep the latest streams in a ref. + const runsRef = useRef[]>([]); + useEffect(() => { + runsRef.current = runs ?? []; + }, [runs]); + + const { data: error = undefined, mutate: setError } = useSWR( + [idKey, "error"], + null + ); + + // Abort controller to cancel the current API call. 
+ const abortControllerRef = useRef(null); + + const stop = useCallback(() => { + if (abortControllerRef.current) { + abortControllerRef.current.abort(); + abortControllerRef.current = null; + } + }, []); + + const apiClient = useApiClient(options); + + const triggerRequest = useCallback(async () => { + try { + const abortController = new AbortController(); + abortControllerRef.current = abortController; + + await processRealtimeBatch( + batchId, + apiClient, + throttle(mutateRuns, options?.throttleInMs), + runsRef, + abortControllerRef + ); + } catch (err) { + // Ignore abort errors as they are expected. + if ((err as any).name === "AbortError") { + abortControllerRef.current = null; + return; + } + + setError(err as Error); + } + }, [batchId, mutateRuns, runsRef, abortControllerRef, apiClient, setError]); + + useEffect(() => { + if (typeof options?.enabled === "boolean" && !options.enabled) { + return; + } + + triggerRequest().finally(() => {}); + + return () => { + stop(); + }; + }, [batchId, stop, options?.enabled]); + + return { runs: runs ?? [], error, stop }; +} + +async function processRealtimeBatch( + batchId: string, + apiClient: ApiClient, + mutateRunsData: KeyedMutator[]>, + existingRunsRef: React.MutableRefObject[]>, + abortControllerRef: React.MutableRefObject +) { + const subscription = apiClient.subscribeToBatch>(batchId, { + signal: abortControllerRef.current?.signal, + }); + + for await (const part of subscription) { + mutateRunsData(insertRunShapeInOrder(existingRunsRef.current, part)); + } +} + +// Inserts and then orders by the run number, and ensures that the run is not duplicated +function insertRunShapeInOrder( + previousRuns: RealtimeRun[], + run: RealtimeRun +) { + const existingRun = previousRuns.find((r) => r.id === run.id); + if (existingRun) { + return previousRuns.map((r) => (r.id === run.id ? run : r)); + } + + const runNumber = run.number; + const index = previousRuns.findIndex((r) => r.number > runNumber); + if (index === -1) { + return [...previousRuns, run]; + } + + return [...previousRuns.slice(0, index), run, ...previousRuns.slice(index)]; +} + +async function processRealtimeRunsWithTag( + tag: string | string[], + apiClient: ApiClient, + mutateRunsData: KeyedMutator[]>, + existingRunsRef: React.MutableRefObject[]>, + abortControllerRef: React.MutableRefObject +) { + const subscription = apiClient.subscribeToRunsWithTag>(tag, { + signal: abortControllerRef.current?.signal, + }); + + for await (const part of subscription) { + mutateRunsData(insertRunShape(existingRunsRef.current, part)); + } +} + +// Replaces or inserts a run shape, ordered by the createdAt timestamp +function insertRunShape( + previousRuns: RealtimeRun[], + run: RealtimeRun +) { + const existingRun = previousRuns.find((r) => r.id === run.id); + if (existingRun) { + return previousRuns.map((r) => (r.id === run.id ? 
run : r)); + } + + const createdAt = run.createdAt; + + const index = previousRuns.findIndex((r) => r.createdAt > createdAt); + + if (index === -1) { + return [...previousRuns, run]; + } + + return [...previousRuns.slice(0, index), run, ...previousRuns.slice(index)]; +} + +async function processRealtimeRunWithStreams< + TTask extends AnyTask = AnyTask, + TStreams extends Record = Record, +>( + runId: string, + apiClient: ApiClient, + mutateRunData: KeyedMutator>, + mutateStreamData: KeyedMutator>, + existingDataRef: React.MutableRefObject>, + abortControllerRef: React.MutableRefObject +) { + const subscription = apiClient.subscribeToRun>(runId, { + signal: abortControllerRef.current?.signal, + }); + + for await (const part of subscription.withStreams()) { + if (part.type === "run") { + mutateRunData(part.run); + } else { + const nextStreamData = { + ...existingDataRef.current, + // @ts-ignore + [part.type]: [...(existingDataRef.current[part.type] || []), part.chunk], + }; + + mutateStreamData(nextStreamData); + } + } +} + +async function processRealtimeRun( + runId: string, + apiClient: ApiClient, + mutateRunData: KeyedMutator>, + abortControllerRef: React.MutableRefObject +) { + const subscription = apiClient.subscribeToRun>(runId, { + signal: abortControllerRef.current?.signal, + }); + + for await (const part of subscription) { + mutateRunData(part); + } +} diff --git a/packages/react-hooks/src/hooks/useRealtimeBatch.ts b/packages/react-hooks/src/hooks/useRealtimeBatch.ts deleted file mode 100644 index e50ab8bce7..0000000000 --- a/packages/react-hooks/src/hooks/useRealtimeBatch.ts +++ /dev/null @@ -1,66 +0,0 @@ -"use client"; - -import { AnyTask, InferRunTypes, TaskRunShape } from "@trigger.dev/core/v3"; -import { useEffect, useState } from "react"; -import { useApiClient } from "./useApiClient.js"; - -/** - * hook to subscribe to realtime updates of a batch of task runs. - * - * @template TTask - The type of the task. - * @param {string} batchId - The unique identifier of the batch to subscribe to. - * @returns {{ runs: TaskRunShape[], error: Error | null }} An object containing the current state of the runs and any error encountered. - * - * @example - * - * ```ts - * import type { myTask } from './path/to/task'; - * const { runs, error } = useRealtimeBatch('batch-id-123'); - * ``` - */ -export function useRealtimeBatch(batchId: string) { - const [runShapes, setRunShapes] = useState[]>([]); - const [error, setError] = useState(null); - const apiClient = useApiClient(); - - useEffect(() => { - const subscription = apiClient.subscribeToBatch>(batchId); - - async function iterateUpdates() { - for await (const run of subscription) { - setRunShapes((prevRuns) => { - return insertRunShapeInOrder(prevRuns, run); - }); - } - } - - iterateUpdates().catch((err) => { - setError(err); - }); - - return () => { - subscription.unsubscribe(); - }; - }, [batchId]); - - return { runs: runShapes, error }; -} - -// Inserts and then orders by the run number, and ensures that the run is not duplicated -function insertRunShapeInOrder( - previousRuns: TaskRunShape[], - run: TaskRunShape -) { - const existingRun = previousRuns.find((r) => r.id === run.id); - if (existingRun) { - return previousRuns.map((r) => (r.id === run.id ? 
run : r)); - } - - const runNumber = run.number; - const index = previousRuns.findIndex((r) => r.number > runNumber); - if (index === -1) { - return [...previousRuns, run]; - } - - return [...previousRuns.slice(0, index), run, ...previousRuns.slice(index)]; -} diff --git a/packages/react-hooks/src/hooks/useRealtimeRun.ts b/packages/react-hooks/src/hooks/useRealtimeRun.ts deleted file mode 100644 index 6aa8fe9d9d..0000000000 --- a/packages/react-hooks/src/hooks/useRealtimeRun.ts +++ /dev/null @@ -1,46 +0,0 @@ -"use client"; - -import { AnyTask, InferRunTypes, TaskRunShape } from "@trigger.dev/core/v3"; -import { useEffect, useState } from "react"; -import { useApiClient } from "./useApiClient.js"; - -/** - * hook to subscribe to realtime updates of a task run. - * - * @template TTask - The type of the task. - * @param {string} runId - The unique identifier of the run to subscribe to. - * @returns {{ run: TaskRunShape | undefined, error: Error | null }} An object containing the current state of the run and any error encountered. - * - * @example - * ```ts - * import type { myTask } from './path/to/task'; - * const { run, error } = useRealtimeRun('run-id-123'); - * ``` - */ -export function useRealtimeRun( - runId: string -): { run: TaskRunShape | undefined; error: Error | null } { - const [runShape, setRunShape] = useState | undefined>(undefined); - const [error, setError] = useState(null); - const apiClient = useApiClient(); - - useEffect(() => { - const subscription = apiClient.subscribeToRun>(runId); - - async function iterateUpdates() { - for await (const run of subscription) { - setRunShape(run); - } - } - - iterateUpdates().catch((err) => { - setError(err); - }); - - return () => { - subscription.unsubscribe(); - }; - }, [runId]); - - return { run: runShape, error }; -} diff --git a/packages/react-hooks/src/hooks/useRealtimeRunsWithTag.ts b/packages/react-hooks/src/hooks/useRealtimeRunsWithTag.ts deleted file mode 100644 index 32b15e7924..0000000000 --- a/packages/react-hooks/src/hooks/useRealtimeRunsWithTag.ts +++ /dev/null @@ -1,58 +0,0 @@ -"use client"; - -import { AnyTask, InferRunTypes, TaskRunShape } from "@trigger.dev/core/v3"; -import { useEffect, useState } from "react"; -import { useApiClient } from "./useApiClient.js"; - -export function useRealtimeRunsWithTag(tag: string | string[]) { - const [runShapes, setRunShapes] = useState[]>([]); - const [error, setError] = useState(null); - const apiClient = useApiClient(); - - useEffect(() => { - const subscription = apiClient.subscribeToRunsWithTag>(tag); - - async function iterateUpdates() { - for await (const run of subscription) { - setRunShapes((prevRuns) => { - return insertRunShape(prevRuns, run); - }); - } - } - - iterateUpdates().catch((err) => { - setError(err); - }); - - return () => { - subscription.unsubscribe(); - }; - }, [tag]); - - return { runs: runShapes, error }; -} - -function stableSortTags(tag: string | string[]) { - return Array.isArray(tag) ? tag.slice().sort() : [tag]; -} - -// Replaces or inserts a run shape, ordered by the createdAt timestamp -function insertRunShape( - previousRuns: TaskRunShape[], - run: TaskRunShape -) { - const existingRun = previousRuns.find((r) => r.id === run.id); - if (existingRun) { - return previousRuns.map((r) => (r.id === run.id ? 
run : r)); - } - - const createdAt = run.createdAt; - - const index = previousRuns.findIndex((r) => r.createdAt > createdAt); - - if (index === -1) { - return [...previousRuns, run]; - } - - return [...previousRuns.slice(0, index), run, ...previousRuns.slice(index)]; -} diff --git a/packages/react-hooks/src/index.ts b/packages/react-hooks/src/index.ts index 560fb37660..7a3a0ae200 100644 --- a/packages/react-hooks/src/index.ts +++ b/packages/react-hooks/src/index.ts @@ -1,7 +1,5 @@ export * from "./contexts.js"; export * from "./hooks/useApiClient.js"; export * from "./hooks/useRun.js"; -export * from "./hooks/useRealtimeRun.js"; -export * from "./hooks/useRealtimeRunsWithTag.js"; -export * from "./hooks/useRealtimeBatch.js"; +export * from "./hooks/useRealtime.js"; export * from "./hooks/useTaskTrigger.js"; diff --git a/packages/react-hooks/src/utils/throttle.ts b/packages/react-hooks/src/utils/throttle.ts new file mode 100644 index 0000000000..0101ebf387 --- /dev/null +++ b/packages/react-hooks/src/utils/throttle.ts @@ -0,0 +1,5 @@ +import throttleFunction from "throttleit"; + +export function throttle any>(fn: T, waitMs: number | undefined): T { + return waitMs != null ? throttleFunction(fn, waitMs) : fn; +} diff --git a/packages/trigger-sdk/src/v3/index.ts b/packages/trigger-sdk/src/v3/index.ts index cc899eab85..67d6a27234 100644 --- a/packages/trigger-sdk/src/v3/index.ts +++ b/packages/trigger-sdk/src/v3/index.ts @@ -39,6 +39,7 @@ export { type RunShape, type AnyRunShape, type TaskRunShape, + type RealtimeRun, type RetrieveRunResult, type AnyRetrieveRunResult, } from "./runs.js"; diff --git a/packages/trigger-sdk/src/v3/runs.ts b/packages/trigger-sdk/src/v3/runs.ts index 3f3d16a24b..f212dc36d1 100644 --- a/packages/trigger-sdk/src/v3/runs.ts +++ b/packages/trigger-sdk/src/v3/runs.ts @@ -8,8 +8,8 @@ import type { RescheduleRunRequestBody, RetrieveRunResult, RunShape, + RealtimeRun, RunSubscription, - SubscribeToRunsQueryParams, TaskRunShape, } from "@trigger.dev/core/v3"; import { @@ -29,7 +29,14 @@ import { resolvePresignedPacketUrl } from "@trigger.dev/core/v3/utils/ioSerializ import { AnyRunHandle, AnyTask } from "./shared.js"; import { tracer } from "./tracer.js"; -export type { AnyRetrieveRunResult, AnyRunShape, RetrieveRunResult, RunShape, TaskRunShape }; +export type { + AnyRetrieveRunResult, + AnyRunShape, + RetrieveRunResult, + RunShape, + TaskRunShape, + RealtimeRun, +}; export const runs = { replay: replayRun, diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index df1782fc08..6fe49c9810 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1401,6 +1401,9 @@ importers: swr: specifier: ^2.2.5 version: 2.2.5(react@18.3.1) + throttleit: + specifier: ^2.1.0 + version: 2.1.0 devDependencies: '@arethetypeswrong/cli': specifier: ^0.15.4 diff --git a/references/nextjs-realtime/src/app/ai/[id]/ClientAiDetails.tsx b/references/nextjs-realtime/src/app/ai/[id]/ClientAiDetails.tsx new file mode 100644 index 0000000000..22a558d304 --- /dev/null +++ b/references/nextjs-realtime/src/app/ai/[id]/ClientAiDetails.tsx @@ -0,0 +1,84 @@ +"use client"; + +import { Card, CardContent, CardFooter } from "@/components/ui/card"; +import type { openaiStreaming, STREAMS } from "@/trigger/ai"; +import { useRealtimeRunWithStreams } from "@trigger.dev/react-hooks"; + +function AiRunDetailsWrapper({ runId, accessToken }: { runId: string; accessToken: string }) { + const { run, streams, error } = useRealtimeRunWithStreams( + runId, + { + throttleInMs: 500, + accessToken, + baseURL: 
process.env.NEXT_PUBLIC_TRIGGER_API_URL, + } + ); + + if (error) { + return ( +
+      <Card>
+        <CardContent>
+          <div>Error: {error.message}</div>
+        </CardContent>
+      </Card>
+    );
+  }
+
+  if (!run) {
+    return (
+      <Card>
+        <CardContent>
+          <div>Loading run details…</div>
+        </CardContent>
+      </Card>
+    );
+  }
+
+  const toolCall = streams.openai?.find(
+    (stream) => stream.type === "tool-call" && stream.toolName === "getWeather"
+  );
+  const toolResult = streams.openai?.find((stream) => stream.type === "tool-result");
+  const textDeltas = streams.openai?.filter((stream) => stream.type === "text-delta");
+
+  const text = textDeltas?.map((delta) => delta.textDelta).join("");
+  const weatherLocation = toolCall ? toolCall.args.location : undefined;
+  const weather = toolResult ? toolResult.result.temperature : undefined;
+
+  return (
+    <Card>
+      <CardContent>
+        {weather ? (
+          <div>{text || "Preparing weather report..."}</div>
+        ) : (
+          <div>Fetching weather data...</div>
+        )}
+      </CardContent>
+      {weather && (
+        <CardFooter>
+          <div>
+            Tool Call: The current temperature in{" "}
+            {weatherLocation} is {weather}.
+          </div>
+        </CardFooter>
+      )}
+    </Card>
+ ); +} + +export default function ClientAiDetails({ + runId, + publicAccessToken, +}: { + runId: string; + publicAccessToken: string; +}) { + return ; +} diff --git a/references/nextjs-realtime/src/app/ai/[id]/page.tsx b/references/nextjs-realtime/src/app/ai/[id]/page.tsx new file mode 100644 index 0000000000..55f1cb32b7 --- /dev/null +++ b/references/nextjs-realtime/src/app/ai/[id]/page.tsx @@ -0,0 +1,15 @@ +import ClientAiDetails from "./ClientAiDetails"; + +export default async function DetailsPage({ + params, + searchParams, +}: { + params: { id: string }; + searchParams: { publicAccessToken: string }; +}) { + return ( +
+    <ClientAiDetails runId={params.id} publicAccessToken={searchParams.publicAccessToken} />
+ ); +} diff --git a/references/nextjs-realtime/src/components/TriggerButton.tsx b/references/nextjs-realtime/src/components/TriggerButton.tsx index c0d1d56885..ef21d33156 100644 --- a/references/nextjs-realtime/src/components/TriggerButton.tsx +++ b/references/nextjs-realtime/src/components/TriggerButton.tsx @@ -3,11 +3,18 @@ import { Button } from "@/components/ui/button"; import { type openaiStreaming } from "@/trigger/ai"; import { TriggerAuthContext, useTaskTrigger } from "@trigger.dev/react-hooks"; +import { useRouter } from "next/navigation"; +import { useEffect } from "react"; function TriggerButton() { const { submit, handle, isLoading } = useTaskTrigger("openai-streaming"); + const router = useRouter(); - console.log(handle); + useEffect(() => { + if (handle) { + router.push(`/ai/${handle.id}?publicAccessToken=${handle.publicAccessToken}`); + } + }, [handle]); return ( ); } - -export default function TriggerButtonClientWrapper({ - publicAccessToken, -}: { - publicAccessToken: string; -}) { - return ( - - - - ); -} diff --git a/references/nextjs-realtime/src/components/TriggerButtonWithStreaming.tsx b/references/nextjs-realtime/src/components/TriggerButtonWithStreaming.tsx new file mode 100644 index 0000000000..38ee631287 --- /dev/null +++ b/references/nextjs-realtime/src/components/TriggerButtonWithStreaming.tsx @@ -0,0 +1,84 @@ +"use client"; + +import { Button } from "@/components/ui/button"; +import type { STREAMS, openaiStreaming } from "@/trigger/ai"; +import { useRealtimeTaskTriggerWithStreams } from "@trigger.dev/react-hooks"; +import { useCallback, useState } from "react"; +import { + Dialog, + DialogContent, + DialogDescription, + DialogHeader, + DialogTitle, + DialogTrigger, +} from "./ui/dialog"; +import { Card, CardContent, CardFooter } from "./ui/card"; + +export default function TriggerButton({ accessToken }: { accessToken: string }) { + const [isOpen, setIsOpen] = useState(false); + + const { submit, isLoading, run, streams } = useRealtimeTaskTriggerWithStreams< + typeof openaiStreaming, + STREAMS + >("openai-streaming", { + accessToken, + baseURL: process.env.NEXT_PUBLIC_TRIGGER_API_URL, + }); + + const openWeatherReport = useCallback(() => { + setIsOpen(true); + submit({ + model: "gpt-4o-mini", + prompt: + "Based on the temperature, will I need to wear extra clothes today in San Fransico? Please be detailed.", + }); + }, []); + + console.log("run", run); + console.log("streams", streams); + + const toolCall = streams.openai?.find( + (stream) => stream.type === "tool-call" && stream.toolName === "getWeather" + ); + const toolResult = streams.openai?.find((stream) => stream.type === "tool-result"); + const textDeltas = streams.openai?.filter((stream) => stream.type === "text-delta"); + + const text = textDeltas?.map((delta) => delta.textDelta).join(""); + const weatherLocation = toolCall ? toolCall.args.location : undefined; + const weather = toolResult ? toolResult.result.temperature : undefined; + + return ( + + + + + + + {weatherLocation} Weather Report + Live weather update and city conditions + + + +
+          {weather ? (
+            <div>{text || "Preparing weather report..."}</div>
+          ) : (
+            <div>Fetching weather data...</div>
+          )}
+        </CardContent>
+        {weather && (
+          <CardFooter>
+            <div>
+              Tool Call: The current temperature in{" "}
+              {weatherLocation} is {weather}.
+            </div>
+          </CardFooter>
+        )}
+      </Card>
+    </DialogContent>
+  </Dialog>
+ ); +} diff --git a/references/nextjs-realtime/src/components/ui/dialog.tsx b/references/nextjs-realtime/src/components/ui/dialog.tsx new file mode 100644 index 0000000000..b5aaef7444 --- /dev/null +++ b/references/nextjs-realtime/src/components/ui/dialog.tsx @@ -0,0 +1,121 @@ +"use client" + +import * as React from "react" +import * as DialogPrimitive from "@radix-ui/react-dialog" +import { cn } from "@/lib/utils" +import { Cross2Icon } from "@radix-ui/react-icons" + +const Dialog = DialogPrimitive.Root + +const DialogTrigger = DialogPrimitive.Trigger + +const DialogPortal = DialogPrimitive.Portal + +const DialogClose = DialogPrimitive.Close + +const DialogOverlay = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + +)) +DialogOverlay.displayName = DialogPrimitive.Overlay.displayName + +const DialogContent = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, children, ...props }, ref) => ( + + + + {children} + + + Close + + + +)) +DialogContent.displayName = DialogPrimitive.Content.displayName + +const DialogHeader = ({ + className, + ...props +}: React.HTMLAttributes) => ( +
+) +DialogHeader.displayName = "DialogHeader" + +const DialogFooter = ({ + className, + ...props +}: React.HTMLAttributes) => ( +
+) +DialogFooter.displayName = "DialogFooter" + +const DialogTitle = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + +)) +DialogTitle.displayName = DialogPrimitive.Title.displayName + +const DialogDescription = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + +)) +DialogDescription.displayName = DialogPrimitive.Description.displayName + +export { + Dialog, + DialogPortal, + DialogOverlay, + DialogTrigger, + DialogClose, + DialogContent, + DialogHeader, + DialogFooter, + DialogTitle, + DialogDescription, +} From b67443d745d7c389c74aa94e1171b699873fefea Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Thu, 14 Nov 2024 14:46:54 +0000 Subject: [PATCH 13/31] Improve the stream throttling --- packages/react-hooks/src/hooks/useRealtime.ts | 74 +++++++++++-------- packages/react-hooks/src/utils/throttle.ts | 59 ++++++++++++++- .../components/TriggerButtonWithStreaming.tsx | 1 + 3 files changed, 100 insertions(+), 34 deletions(-) diff --git a/packages/react-hooks/src/hooks/useRealtime.ts b/packages/react-hooks/src/hooks/useRealtime.ts index c6ef188bb9..0f859a9981 100644 --- a/packages/react-hooks/src/hooks/useRealtime.ts +++ b/packages/react-hooks/src/hooks/useRealtime.ts @@ -2,9 +2,9 @@ import { AnyTask, ApiClient, InferRunTypes, RealtimeRun } from "@trigger.dev/core/v3"; import { useCallback, useEffect, useId, useRef, useState } from "react"; -import { throttle } from "../utils/throttle.js"; import { KeyedMutator, useSWR } from "../utils/trigger-swr.js"; import { useApiClient, UseApiClientOptions } from "./useApiClient.js"; +import { createThrottledQueue } from "../utils/throttle.js"; export type UseRealtimeRunOptions = UseApiClientOptions & { id?: string; @@ -78,12 +78,7 @@ export function useRealtimeRun( const abortController = new AbortController(); abortControllerRef.current = abortController; - await processRealtimeRun( - runId, - apiClient, - throttle(mutateRun, options?.experimental_throttleInMs), - abortControllerRef - ); + await processRealtimeRun(runId, apiClient, mutateRun, abortControllerRef); } catch (err) { // Ignore abort errors as they are expected. if ((err as any).name === "AbortError") { @@ -199,10 +194,11 @@ export function useRealtimeRunWithStreams< await processRealtimeRunWithStreams( runId, apiClient, - throttle(mutateRun, options?.experimental_throttleInMs), - throttle(mutateStreams, options?.experimental_throttleInMs), + mutateRun, + mutateStreams, streamsRef, - abortControllerRef + abortControllerRef, + options?.experimental_throttleInMs ); } catch (err) { // Ignore abort errors as they are expected. @@ -285,13 +281,7 @@ export function useRealtimeRunsWithTag( const abortController = new AbortController(); abortControllerRef.current = abortController; - await processRealtimeRunsWithTag( - tag, - apiClient, - throttle(mutateRuns, options?.experimental_throttleInMs), - runsRef, - abortControllerRef - ); + await processRealtimeRunsWithTag(tag, apiClient, mutateRuns, runsRef, abortControllerRef); } catch (err) { // Ignore abort errors as they are expected. 
if ((err as any).name === "AbortError") { @@ -372,13 +362,7 @@ export function useRealtimeBatch( const abortController = new AbortController(); abortControllerRef.current = abortController; - await processRealtimeBatch( - batchId, - apiClient, - throttle(mutateRuns, options?.experimental_throttleInMs), - runsRef, - abortControllerRef - ); + await processRealtimeBatch(batchId, apiClient, mutateRuns, runsRef, abortControllerRef); } catch (err) { // Ignore abort errors as they are expected. if ((err as any).name === "AbortError") { @@ -486,23 +470,51 @@ async function processRealtimeRunWithStreams< mutateRunData: KeyedMutator>, mutateStreamData: KeyedMutator>, existingDataRef: React.MutableRefObject>, - abortControllerRef: React.MutableRefObject + abortControllerRef: React.MutableRefObject, + throttleInMs?: number ) { const subscription = apiClient.subscribeToRun>(runId, { signal: abortControllerRef.current?.signal, }); + type StreamUpdate = { + type: keyof TStreams; + chunk: any; + }; + + const streamQueue = createThrottledQueue(async (updates) => { + const nextStreamData = { ...existingDataRef.current }; + + // Group updates by type + const updatesByType = updates.reduce( + (acc, update) => { + if (!acc[update.type]) { + acc[update.type] = []; + } + acc[update.type].push(update.chunk); + return acc; + }, + {} as Record + ); + + // Apply all updates + for (const [type, chunks] of Object.entries(updatesByType)) { + // @ts-ignore + nextStreamData[type] = [...(existingDataRef.current[type] || []), ...chunks]; + } + + await mutateStreamData(nextStreamData); + }, throttleInMs); + for await (const part of subscription.withStreams()) { if (part.type === "run") { mutateRunData(part.run); } else { - const nextStreamData = { - ...existingDataRef.current, + streamQueue.add({ + type: part.type, // @ts-ignore - [part.type]: [...(existingDataRef.current[part.type] || []), part.chunk], - }; - - mutateStreamData(nextStreamData); + chunk: part.chunk, + }); } } } diff --git a/packages/react-hooks/src/utils/throttle.ts b/packages/react-hooks/src/utils/throttle.ts index 0101ebf387..070beb37b6 100644 --- a/packages/react-hooks/src/utils/throttle.ts +++ b/packages/react-hooks/src/utils/throttle.ts @@ -1,5 +1,58 @@ -import throttleFunction from "throttleit"; +// Reusable throttle utility +export type ThrottledQueue = { + add: (item: T) => void; + flush: () => Promise; + isEmpty: () => boolean; +}; -export function throttle any>(fn: T, waitMs: number | undefined): T { - return waitMs != null ? 
throttleFunction(fn, waitMs) : fn; +export function createThrottledQueue( + onFlush: (items: T[]) => Promise, + throttleInMs?: number +): ThrottledQueue { + let queue: T[] = []; + let lastFlushTime = 0; + let flushPromise: Promise | null = null; + + const scheduleFlush = async () => { + // If no throttle specified or there's already a flush in progress, return + if (!throttleInMs) { + // Immediately flush when no throttling is specified + const itemsToFlush = [...queue]; + queue = []; + await onFlush(itemsToFlush); + return; + } + + if (queue.length === 0 || flushPromise) return; + + const now = Date.now(); + const timeUntilNextFlush = Math.max(0, lastFlushTime + throttleInMs - now); + + if (timeUntilNextFlush === 0) { + const itemsToFlush = [...queue]; + queue = []; + lastFlushTime = now; + flushPromise = onFlush(itemsToFlush).finally(() => { + flushPromise = null; + // Check if more items accumulated during flush + scheduleFlush(); + }); + } else { + setTimeout(scheduleFlush, timeUntilNextFlush); + } + }; + + return { + add: (item: T) => { + queue.push(item); + scheduleFlush(); + }, + flush: async () => { + if (queue.length === 0) return; + const itemsToFlush = [...queue]; + queue = []; + await onFlush(itemsToFlush); + }, + isEmpty: () => queue.length === 0, + }; } diff --git a/references/nextjs-realtime/src/components/TriggerButtonWithStreaming.tsx b/references/nextjs-realtime/src/components/TriggerButtonWithStreaming.tsx index 38ee631287..07895daf90 100644 --- a/references/nextjs-realtime/src/components/TriggerButtonWithStreaming.tsx +++ b/references/nextjs-realtime/src/components/TriggerButtonWithStreaming.tsx @@ -23,6 +23,7 @@ export default function TriggerButton({ accessToken }: { accessToken: string }) >("openai-streaming", { accessToken, baseURL: process.env.NEXT_PUBLIC_TRIGGER_API_URL, + experimental_throttleInMs: 100, }); const openWeatherReport = useCallback(() => { From 75fd83f4cd7cdbb72dd061695deed5541ac76a04 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Thu, 14 Nov 2024 14:49:26 +0000 Subject: [PATCH 14/31] Use the runId as the ID key to bust the cache after triggering --- packages/react-hooks/src/hooks/useTaskTrigger.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/react-hooks/src/hooks/useTaskTrigger.ts b/packages/react-hooks/src/hooks/useTaskTrigger.ts index 5c8304ae5a..2cc670422a 100644 --- a/packages/react-hooks/src/hooks/useTaskTrigger.ts +++ b/packages/react-hooks/src/hooks/useTaskTrigger.ts @@ -96,6 +96,7 @@ export function useRealtimeTaskTriggerWithStreams< const triggerInstance = useTaskTrigger(id, options); const realtimeInstance = useRealtimeRunWithStreams(triggerInstance.handle?.id, { ...options, + id: triggerInstance.handle?.id, accessToken: triggerInstance.handle?.publicAccessToken ?? 
options?.accessToken, }); From 54307b77fb749446a96aaf9c8d81dd2c57a496eb Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Thu, 14 Nov 2024 15:11:24 +0000 Subject: [PATCH 15/31] Upgrade to to the latest electric sql client and server --- apps/webapp/app/services/realtimeClient.server.ts | 5 +++-- apps/webapp/test/realtimeClient.test.ts | 14 ++++++++------ docker/docker-compose.yml | 2 +- internal-packages/testcontainers/src/utils.ts | 2 +- packages/core/package.json | 2 +- packages/core/src/v3/apiClient/stream.ts | 6 +++--- pnpm-lock.yaml | 8 ++++---- 7 files changed, 21 insertions(+), 18 deletions(-) diff --git a/apps/webapp/app/services/realtimeClient.server.ts b/apps/webapp/app/services/realtimeClient.server.ts index ad52906b45..e3e4b92eb9 100644 --- a/apps/webapp/app/services/realtimeClient.server.ts +++ b/apps/webapp/app/services/realtimeClient.server.ts @@ -70,7 +70,7 @@ export class RealtimeClient { #constructElectricUrl(url: URL | string, whereClause: string): URL { const $url = new URL(url.toString()); - const electricUrl = new URL(`${this.options.electricOrigin}/v1/shape/public."TaskRun"`); + const electricUrl = new URL(`${this.options.electricOrigin}/v1/shape`); // Copy over all the url search params to the electric url $url.searchParams.forEach((value, key) => { @@ -86,6 +86,7 @@ export class RealtimeClient { // }); electricUrl.searchParams.set("where", whereClause); + electricUrl.searchParams.set("table", 'public."TaskRun"'); return electricUrl; } @@ -231,7 +232,7 @@ export class RealtimeClient { } function extractShapeId(url: URL) { - return url.searchParams.get("shape_id"); + return url.searchParams.get("handle"); } function isLiveRequestUrl(url: URL) { diff --git a/apps/webapp/test/realtimeClient.test.ts b/apps/webapp/test/realtimeClient.test.ts index d581070821..5dddeaceab 100644 --- a/apps/webapp/test/realtimeClient.test.ts +++ b/apps/webapp/test/realtimeClient.test.ts @@ -86,8 +86,10 @@ describe("RealtimeClient", () => { const headers = Object.fromEntries(response.headers.entries()); - const shapeId = headers["electric-shape-id"]; - const chunkOffset = headers["electric-chunk-last-offset"]; + console.log(headers); + + const shapeId = headers["electric-handle"]; + const chunkOffset = headers["electric-offset"]; expect(response.status).toBe(200); expect(response2.status).toBe(200); @@ -96,7 +98,7 @@ describe("RealtimeClient", () => { // Okay, now we will do two live requests, and the second one should fail because of the concurrency limit const liveResponsePromise = client.streamRun( - `http://localhost:3000?offset=0_0&live=true&shape_id=${shapeId}`, + `http://localhost:3000?offset=0_0&live=true&handle=${shapeId}`, environment, run.id ); @@ -104,7 +106,7 @@ describe("RealtimeClient", () => { const liveResponsePromise2 = new Promise((resolve) => { setTimeout(async () => { const response = await client.streamRun( - `http://localhost:3000?offset=0_0&live=true&shape_id=${shapeId}`, + `http://localhost:3000?offset=0_0&live=true&handle=${shapeId}`, environment, run.id ); @@ -200,8 +202,8 @@ describe("RealtimeClient", () => { const headers = Object.fromEntries(response.headers.entries()); - const shapeId = headers["electric-shape-id"]; - const chunkOffset = headers["electric-chunk-last-offset"]; + const shapeId = headers["electric-handle"]; + const chunkOffset = headers["electric-offset"]; expect(response.status).toBe(200); expect(shapeId).toBeDefined(); diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index e221063e70..db535c2f54 100644 --- 
a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -61,7 +61,7 @@ services: - 6379:6379 electric: - image: electricsql/electric:0.7.5 + image: electricsql/electric:0.8.1 restart: always environment: DATABASE_URL: postgresql://postgres:postgres@database:5432/postgres?sslmode=disable diff --git a/internal-packages/testcontainers/src/utils.ts b/internal-packages/testcontainers/src/utils.ts index 343c538754..162ae81f8a 100644 --- a/internal-packages/testcontainers/src/utils.ts +++ b/internal-packages/testcontainers/src/utils.ts @@ -55,7 +55,7 @@ export async function createElectricContainer( network.getName() )}:5432/${postgresContainer.getDatabase()}?sslmode=disable`; - const container = await new GenericContainer("electricsql/electric:0.7.5") + const container = await new GenericContainer("electricsql/electric:0.8.1") .withExposedPorts(3000) .withNetwork(network) .withEnvironment({ diff --git a/packages/core/package.json b/packages/core/package.json index b236eac2ab..d2de234f18 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -182,7 +182,7 @@ "check-exports": "attw --pack ." }, "dependencies": { - "@electric-sql/client": "0.6.3", + "@electric-sql/client": "0.7.1", "@google-cloud/precise-date": "^4.0.0", "@jsonhero/path": "^1.0.21", "@opentelemetry/api": "1.9.0", diff --git a/packages/core/src/v3/apiClient/stream.ts b/packages/core/src/v3/apiClient/stream.ts index 87b703a1bd..646ee6aacd 100644 --- a/packages/core/src/v3/apiClient/stream.ts +++ b/packages/core/src/v3/apiClient/stream.ts @@ -23,14 +23,14 @@ export async function zodShapeStream( const shape = new Shape(stream); - const initialValue = await shape.value; + const initialRows = await shape.rows; - for (const shapeRow of initialValue.values()) { + for (const shapeRow of initialRows) { await callback(schema.parse(shapeRow)); } return shape.subscribe(async (newShape) => { - for (const shapeRow of newShape.values()) { + for (const shapeRow of newShape.rows) { await callback(schema.parse(shapeRow)); } }); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 4a1afd5399..a690166fc7 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1263,8 +1263,8 @@ importers: packages/core: dependencies: '@electric-sql/client': - specifier: 0.6.3 - version: 0.6.3 + specifier: 0.7.1 + version: 0.7.1 '@google-cloud/precise-date': specifier: ^4.0.0 version: 4.0.0 @@ -4954,8 +4954,8 @@ packages: '@rollup/rollup-darwin-arm64': 4.21.3 dev: false - /@electric-sql/client@0.6.3: - resolution: {integrity: sha512-/AYkRrEASKIGcjtNp8IVJ3sAUm+IQ2l0NrGgDvvAG/n1+ifOl7kD1E4dRyg1qdY/b+HdKhGNYlNgsPuwMKO2Mg==} + /@electric-sql/client@0.7.1: + resolution: {integrity: sha512-NpKEn5hDSy+NaAdG9Ql8kIGfjrj/XfakJOOHTTutb99db3Dza0uUfnkqycFpyUAarFMQ4hYSKgx8AbOm1PCeFQ==} optionalDependencies: '@rollup/rollup-darwin-arm64': 4.21.3 dev: false From f08ea9bcdbf90964873e50b151e43a17aa14802a Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Thu, 14 Nov 2024 15:51:07 +0000 Subject: [PATCH 16/31] Make realtime server backwards compat with 3.1.2 release --- .../routes/realtime.v1.batches.$batchId.ts | 7 +- .../app/routes/realtime.v1.runs.$runId.ts | 7 +- apps/webapp/app/routes/realtime.v1.runs.ts | 7 +- .../app/services/realtimeClient.server.ts | 66 +++++--- apps/webapp/app/utils/longPollingFetch.ts | 26 ++- apps/webapp/test/realtimeClient.test.ts | 158 +++++++++++++++++- packages/core/src/v3/apiClient/stream.ts | 5 +- 7 files changed, 240 insertions(+), 36 deletions(-) diff --git a/apps/webapp/app/routes/realtime.v1.batches.$batchId.ts 
b/apps/webapp/app/routes/realtime.v1.batches.$batchId.ts index b4bf1cd1e8..1563af4d67 100644 --- a/apps/webapp/app/routes/realtime.v1.batches.$batchId.ts +++ b/apps/webapp/app/routes/realtime.v1.batches.$batchId.ts @@ -31,6 +31,11 @@ export const loader = createLoaderApiRoute( return json({ error: "Batch not found" }, { status: 404 }); } - return realtimeClient.streamBatch(request.url, authentication.environment, batchRun.id); + return realtimeClient.streamBatch( + request.url, + authentication.environment, + batchRun.id, + request.headers.get("x-trigger-electric-version") ?? undefined + ); } ); diff --git a/apps/webapp/app/routes/realtime.v1.runs.$runId.ts b/apps/webapp/app/routes/realtime.v1.runs.$runId.ts index 4ce369ccf4..d2cb44b718 100644 --- a/apps/webapp/app/routes/realtime.v1.runs.$runId.ts +++ b/apps/webapp/app/routes/realtime.v1.runs.$runId.ts @@ -31,6 +31,11 @@ export const loader = createLoaderApiRoute( return json({ error: "Run not found" }, { status: 404 }); } - return realtimeClient.streamRun(request.url, authentication.environment, run.id); + return realtimeClient.streamRun( + request.url, + authentication.environment, + run.id, + request.headers.get("x-trigger-electric-version") ?? undefined + ); } ); diff --git a/apps/webapp/app/routes/realtime.v1.runs.ts b/apps/webapp/app/routes/realtime.v1.runs.ts index d4a0170c61..b711b230fe 100644 --- a/apps/webapp/app/routes/realtime.v1.runs.ts +++ b/apps/webapp/app/routes/realtime.v1.runs.ts @@ -23,6 +23,11 @@ export const loader = createLoaderApiRoute( }, }, async ({ searchParams, authentication, request }) => { - return realtimeClient.streamRuns(request.url, authentication.environment, searchParams); + return realtimeClient.streamRuns( + request.url, + authentication.environment, + searchParams, + request.headers.get("x-trigger-electric-version") ?? 
undefined + ); } ); diff --git a/apps/webapp/app/services/realtimeClient.server.ts b/apps/webapp/app/services/realtimeClient.server.ts index e3e4b92eb9..c1e63292e0 100644 --- a/apps/webapp/app/services/realtimeClient.server.ts +++ b/apps/webapp/app/services/realtimeClient.server.ts @@ -37,18 +37,29 @@ export class RealtimeClient { this.#registerCommands(); } - async streamRun(url: URL | string, environment: RealtimeEnvironment, runId: string) { - return this.#streamRunsWhere(url, environment, `id='${runId}'`); + async streamRun( + url: URL | string, + environment: RealtimeEnvironment, + runId: string, + clientVersion?: string + ) { + return this.#streamRunsWhere(url, environment, `id='${runId}'`, clientVersion); } - async streamBatch(url: URL | string, environment: RealtimeEnvironment, batchId: string) { - return this.#streamRunsWhere(url, environment, `"batchId"='${batchId}'`); + async streamBatch( + url: URL | string, + environment: RealtimeEnvironment, + batchId: string, + clientVersion?: string + ) { + return this.#streamRunsWhere(url, environment, `"batchId"='${batchId}'`, clientVersion); } async streamRuns( url: URL | string, environment: RealtimeEnvironment, - params: RealtimeRunsParams + params: RealtimeRunsParams, + clientVersion?: string ) { const whereClauses: string[] = [`"runtimeEnvironmentId"='${environment.id}'`]; @@ -58,16 +69,21 @@ export class RealtimeClient { const whereClause = whereClauses.join(" AND "); - return this.#streamRunsWhere(url, environment, whereClause); + return this.#streamRunsWhere(url, environment, whereClause, clientVersion); } - async #streamRunsWhere(url: URL | string, environment: RealtimeEnvironment, whereClause: string) { - const electricUrl = this.#constructElectricUrl(url, whereClause); + async #streamRunsWhere( + url: URL | string, + environment: RealtimeEnvironment, + whereClause: string, + clientVersion?: string + ) { + const electricUrl = this.#constructElectricUrl(url, whereClause, clientVersion); - return this.#performElectricRequest(electricUrl, environment); + return this.#performElectricRequest(electricUrl, environment, clientVersion); } - #constructElectricUrl(url: URL | string, whereClause: string): URL { + #constructElectricUrl(url: URL | string, whereClause: string, clientVersion?: string): URL { const $url = new URL(url.toString()); const electricUrl = new URL(`${this.options.electricOrigin}/v1/shape`); @@ -77,36 +93,42 @@ export class RealtimeClient { electricUrl.searchParams.set(key, value); }); - // const electricParams = ["shape_id", "live", "offset", "columns", "cursor"]; - - // electricParams.forEach((param) => { - // if ($url.searchParams.has(param) && $url.searchParams.get(param)) { - // electricUrl.searchParams.set(param, $url.searchParams.get(param)!); - // } - // }); - electricUrl.searchParams.set("where", whereClause); electricUrl.searchParams.set("table", 'public."TaskRun"'); + if (!clientVersion) { + // If the client version is not provided, that means we're using an older client + // This means the client will be sending shape_id instead of handle + electricUrl.searchParams.set("handle", electricUrl.searchParams.get("shape_id") ?? ""); + } + return electricUrl; } - async #performElectricRequest(url: URL, environment: RealtimeEnvironment) { + async #performElectricRequest( + url: URL, + environment: RealtimeEnvironment, + clientVersion?: string + ) { const shapeId = extractShapeId(url); logger.debug("[realtimeClient] request", { url: url.toString(), }); + const rewriteResponseHeaders: Record = clientVersion + ? 
{} + : { "electric-handle": "electric-shape-id", "electric-offset": "electric-chunk-last-offset" }; + if (!shapeId) { // If the shapeId is not present, we're just getting the initial value - return longPollingFetch(url.toString()); + return longPollingFetch(url.toString(), {}, rewriteResponseHeaders); } const isLive = isLiveRequestUrl(url); if (!isLive) { - return longPollingFetch(url.toString()); + return longPollingFetch(url.toString(), {}, rewriteResponseHeaders); } const requestId = randomUUID(); @@ -148,7 +170,7 @@ export class RealtimeClient { try { // ... (rest of your existing code for the long polling request) - const response = await longPollingFetch(url.toString()); + const response = await longPollingFetch(url.toString(), {}, rewriteResponseHeaders); // Decrement the counter after the long polling request is complete await this.#decrementConcurrency(environment.id, requestId); diff --git a/apps/webapp/app/utils/longPollingFetch.ts b/apps/webapp/app/utils/longPollingFetch.ts index c070beb677..ec7e309180 100644 --- a/apps/webapp/app/utils/longPollingFetch.ts +++ b/apps/webapp/app/utils/longPollingFetch.ts @@ -6,7 +6,11 @@ import { logger } from "~/services/logger.server"; // Similar-ish problem to https://github.com/wintercg/fetch/issues/23 -export async function longPollingFetch(url: string, options?: RequestInit) { +export async function longPollingFetch( + url: string, + options?: RequestInit, + rewriteResponseHeaders?: Record +) { try { let response = await fetch(url, options); @@ -14,12 +18,32 @@ export async function longPollingFetch(url: string, options?: RequestInit) { const headers = new Headers(response.headers); headers.delete("content-encoding"); headers.delete("content-length"); + + response = new Response(response.body, { + headers, + status: response.status, + statusText: response.statusText, + }); + } + + if (rewriteResponseHeaders) { + const headers = new Headers(response.headers); + + for (const [fromKey, toKey] of Object.entries(rewriteResponseHeaders)) { + const value = headers.get(fromKey); + if (value) { + headers.set(toKey, value); + headers.delete(fromKey); + } + } + response = new Response(response.body, { headers, status: response.status, statusText: response.statusText, }); } + return response; } catch (error) { if (error instanceof TypeError) { diff --git a/apps/webapp/test/realtimeClient.test.ts b/apps/webapp/test/realtimeClient.test.ts index 5dddeaceab..de63a81dbc 100644 --- a/apps/webapp/test/realtimeClient.test.ts +++ b/apps/webapp/test/realtimeClient.test.ts @@ -64,7 +64,8 @@ describe("RealtimeClient", () => { const initialResponsePromise = client.streamRun( "http://localhost:3000?offset=-1", environment, - run.id + run.id, + "0.8.1" ); const initializeResponsePromise2 = new Promise((resolve) => { @@ -72,7 +73,8 @@ describe("RealtimeClient", () => { const response = await client.streamRun( "http://localhost:3000?offset=-1", environment, - run.id + run.id, + "0.8.1" ); resolve(response); @@ -86,8 +88,6 @@ describe("RealtimeClient", () => { const headers = Object.fromEntries(response.headers.entries()); - console.log(headers); - const shapeId = headers["electric-handle"]; const chunkOffset = headers["electric-offset"]; @@ -100,7 +100,8 @@ describe("RealtimeClient", () => { const liveResponsePromise = client.streamRun( `http://localhost:3000?offset=0_0&live=true&handle=${shapeId}`, environment, - run.id + run.id, + "0.8.1" ); const liveResponsePromise2 = new Promise((resolve) => { @@ -108,7 +109,8 @@ describe("RealtimeClient", () => { const response 
= await client.streamRun( `http://localhost:3000?offset=0_0&live=true&handle=${shapeId}`, environment, - run.id + run.id, + "0.8.1" ); resolve(response); @@ -196,9 +198,14 @@ describe("RealtimeClient", () => { }, }); - const response = await client.streamRuns("http://localhost:3000?offset=-1", environment, { - tags: ["test:tag:1234"], - }); + const response = await client.streamRuns( + "http://localhost:3000?offset=-1", + environment, + { + tags: ["test:tag:1234"], + }, + "0.8.1" + ); const headers = Object.fromEntries(response.headers.entries()); @@ -210,4 +217,137 @@ describe("RealtimeClient", () => { expect(chunkOffset).toBe("0_0"); } ); + + containerWithElectricTest( + "Should adapt for older client versions", + { timeout: 30_000 }, + async ({ redis, electricOrigin, prisma }) => { + const client = new RealtimeClient({ + electricOrigin, + keyPrefix: "test:realtime", + redis: redis.options, + expiryTimeInSeconds: 5, + cachedLimitProvider: { + async getCachedLimit() { + return 1; + }, + }, + }); + + const organization = await prisma.organization.create({ + data: { + title: "test-org", + slug: "test-org", + }, + }); + + const project = await prisma.project.create({ + data: { + name: "test-project", + slug: "test-project", + organizationId: organization.id, + externalRef: "test-project", + }, + }); + + const environment = await prisma.runtimeEnvironment.create({ + data: { + projectId: project.id, + organizationId: organization.id, + slug: "test", + type: "DEVELOPMENT", + shortcode: "1234", + apiKey: "tr_dev_1234", + pkApiKey: "pk_test_1234", + }, + }); + + const run = await prisma.taskRun.create({ + data: { + taskIdentifier: "test-task", + friendlyId: "run_1234", + payload: "{}", + payloadType: "application/json", + traceId: "trace_1234", + spanId: "span_1234", + queue: "test-queue", + projectId: project.id, + runtimeEnvironmentId: environment.id, + }, + }); + + const initialResponsePromise = client.streamRun( + "http://localhost:3000?offset=-1", + environment, + run.id + ); + + const initializeResponsePromise2 = new Promise((resolve) => { + setTimeout(async () => { + const response = await client.streamRun( + "http://localhost:3000?offset=-1", + environment, + run.id, + "0.8.1" + ); + + resolve(response); + }, 1); + }); + + const [response, response2] = await Promise.all([ + initialResponsePromise, + initializeResponsePromise2, + ]); + + const headers = Object.fromEntries(response.headers.entries()); + + const shapeId = headers["electric-shape-id"]; + const chunkOffset = headers["electric-chunk-last-offset"]; + + expect(response.status).toBe(200); + expect(response2.status).toBe(200); + expect(shapeId).toBeDefined(); + expect(chunkOffset).toBe("0_0"); + + // Okay, now we will do two live requests, and the second one should fail because of the concurrency limit + const liveResponsePromise = client.streamRun( + `http://localhost:3000?offset=0_0&live=true&shape_id=${shapeId}`, + environment, + run.id + ); + + const liveResponsePromise2 = new Promise((resolve) => { + setTimeout(async () => { + const response = await client.streamRun( + `http://localhost:3000?offset=0_0&live=true&shape_id=${shapeId}`, + environment, + run.id + ); + + resolve(response); + }, 1); + }); + + const updateRunAfter1SecondPromise = new Promise((resolve) => { + setTimeout(async () => { + await prisma.taskRun.update({ + where: { id: run.id }, + data: { metadata: "{}" }, + }); + + resolve(); + }, 1000); + }); + + const [liveResponse, liveResponse2] = await Promise.all([ + liveResponsePromise, + liveResponsePromise2, + 
updateRunAfter1SecondPromise, + ]); + + expect(liveResponse.status).toBe(200); + expect(liveResponse2.status).toBe(429); + } + ); }); diff --git a/packages/core/src/v3/apiClient/stream.ts b/packages/core/src/v3/apiClient/stream.ts index 646ee6aacd..82aeec3f1f 100644 --- a/packages/core/src/v3/apiClient/stream.ts +++ b/packages/core/src/v3/apiClient/stream.ts @@ -16,7 +16,10 @@ export async function zodShapeStream( const stream = new ShapeStream>({ url, - headers: options?.headers, + headers: { + ...options?.headers, + "x-trigger-electric-version": "0.8.1", + }, fetchClient: options?.fetchClient, signal: options?.signal, }); From d8296b42b20a1a4fe91777ea41cfa9af312dcc78 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Thu, 14 Nov 2024 19:59:14 +0000 Subject: [PATCH 17/31] Pass the runId into useRealtimeRun --- packages/react-hooks/src/hooks/useTaskTrigger.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/react-hooks/src/hooks/useTaskTrigger.ts b/packages/react-hooks/src/hooks/useTaskTrigger.ts index 2cc670422a..8528299461 100644 --- a/packages/react-hooks/src/hooks/useTaskTrigger.ts +++ b/packages/react-hooks/src/hooks/useTaskTrigger.ts @@ -119,6 +119,7 @@ export function useRealtimeTaskTrigger( const triggerInstance = useTaskTrigger(id, options); const realtimeInstance = useRealtimeRun(triggerInstance.handle?.id, { ...options, + id: triggerInstance.handle?.id, accessToken: triggerInstance.handle?.publicAccessToken ?? options?.accessToken, }); From 3d50d9c3337e3c5d916459337ccad06bdd6fb006 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Fri, 15 Nov 2024 16:14:59 +0000 Subject: [PATCH 18/31] Fix scopes when specifiying reading all runs --- packages/trigger-sdk/src/v3/auth.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/trigger-sdk/src/v3/auth.ts b/packages/trigger-sdk/src/v3/auth.ts index 3bf027ea44..c871b3b664 100644 --- a/packages/trigger-sdk/src/v3/auth.ts +++ b/packages/trigger-sdk/src/v3/auth.ts @@ -150,6 +150,8 @@ function flattenScopes(permissions: PublicTokenPermissions): string[] { } } else if (typeof value === "string") { flattenedPermissions.push(`${action}:${property}:${value}`); + } else if (typeof value === "boolean" && value) { + flattenedPermissions.push(`${action}:${property}`); } } } From a4ff0011f2814f9bd0ec5aa8ef5dd61745c32ae4 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Fri, 15 Nov 2024 16:15:15 +0000 Subject: [PATCH 19/31] WIP @trigger.dev/rsc package --- packages/cli-v3/src/build/extensions.ts | 6 + packages/core/package.json | 4 +- packages/core/src/v3/build/extensions.ts | 1 + packages/core/src/v3/runMetadata/index.ts | 2 +- packages/core/src/v3/runMetadata/manager.ts | 8 +- .../core/src/v3/runMetadata/metadataStream.ts | 2 - packages/core/src/v3/runMetadata/types.ts | 6 +- packages/rsc/CHANGELOG.md | 1 + packages/rsc/README.md | 1 + packages/rsc/package.json | 76 ++++++++++++ packages/rsc/src/build.ts | 108 +++++++++++++++++ packages/rsc/src/index.ts | 1 + packages/rsc/src/sourceDir-cjs.cts | 3 + packages/rsc/src/sourceDir.ts | 3 + packages/rsc/tsconfig.json | 10 ++ packages/trigger-sdk/src/v3/metadata.ts | 2 +- pnpm-lock.yaml | 76 ++++++++++-- references/nextjs-realtime/package.json | 1 + references/nextjs-realtime/src/app/page.tsx | 10 ++ .../nextjs-realtime/src/trigger/rsc.tsx | 112 ++++++++++++++++++ references/nextjs-realtime/trigger.config.ts | 4 + 21 files changed, 417 insertions(+), 20 deletions(-) create mode 100644 packages/rsc/CHANGELOG.md create mode 100644 packages/rsc/README.md create mode 100644 packages/rsc/package.json 
create mode 100644 packages/rsc/src/build.ts create mode 100644 packages/rsc/src/index.ts create mode 100644 packages/rsc/src/sourceDir-cjs.cts create mode 100644 packages/rsc/src/sourceDir.ts create mode 100644 packages/rsc/tsconfig.json create mode 100644 references/nextjs-realtime/src/trigger/rsc.tsx diff --git a/packages/cli-v3/src/build/extensions.ts b/packages/cli-v3/src/build/extensions.ts index 20ea79585d..8e0f9f0a4e 100644 --- a/packages/cli-v3/src/build/extensions.ts +++ b/packages/cli-v3/src/build/extensions.ts @@ -184,6 +184,12 @@ function applyLayerToManifest(layer: BuildLayer, manifest: BuildManifest): Build } } + if (layer.conditions) { + $manifest.customConditions ??= []; + $manifest.customConditions = $manifest.customConditions.concat(layer.conditions); + $manifest.customConditions = Array.from(new Set($manifest.customConditions)); + } + return $manifest; } diff --git a/packages/core/package.json b/packages/core/package.json index d2de234f18..d05d3c79aa 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -204,9 +204,9 @@ "nanoid": "^3.3.4", "socket.io-client": "4.7.5", "superjson": "^2.2.1", + "zod": "3.22.3", "zod-error": "1.5.0", - "zod-validation-error": "^1.5.0", - "zod": "3.22.3" + "zod-validation-error": "^1.5.0" }, "devDependencies": { "@ai-sdk/provider-utils": "^1.0.22", diff --git a/packages/core/src/v3/build/extensions.ts b/packages/core/src/v3/build/extensions.ts index 6b5fcf3d17..68ed3b5cee 100644 --- a/packages/core/src/v3/build/extensions.ts +++ b/packages/core/src/v3/build/extensions.ts @@ -65,6 +65,7 @@ export interface BuildLayer { override?: boolean; }; dependencies?: Record; + conditions?: string[]; } export type PluginPlacement = "first" | "last"; diff --git a/packages/core/src/v3/runMetadata/index.ts b/packages/core/src/v3/runMetadata/index.ts index bc7a766fc1..f594985006 100644 --- a/packages/core/src/v3/runMetadata/index.ts +++ b/packages/core/src/v3/runMetadata/index.ts @@ -67,7 +67,7 @@ export class RunMetadataAPI implements RunMetadataManager { public stream( key: string, - value: AsyncIterable, + value: AsyncIterable | ReadableStream, signal?: AbortSignal ): Promise> { return this.#getManager().stream(key, value, signal); diff --git a/packages/core/src/v3/runMetadata/manager.ts b/packages/core/src/v3/runMetadata/manager.ts index f4280ef916..45e208687c 100644 --- a/packages/core/src/v3/runMetadata/manager.ts +++ b/packages/core/src/v3/runMetadata/manager.ts @@ -164,11 +164,13 @@ export class StandardMetadataManager implements RunMetadataManager { public async stream( key: string, - value: AsyncIterable, + value: AsyncIterable | ReadableStream, signal?: AbortSignal ): Promise> { + const $value = value as AsyncIterable; + if (!this.runId) { - return value; + return $value; } // Add the key to the special stream metadata object @@ -179,7 +181,7 @@ export class StandardMetadataManager implements RunMetadataManager { const streamInstance = new MetadataStream({ key, runId: this.runId, - iterator: value[Symbol.asyncIterator](), + iterator: $value[Symbol.asyncIterator](), baseUrl: this.streamsBaseUrl, signal, }); diff --git a/packages/core/src/v3/runMetadata/metadataStream.ts b/packages/core/src/v3/runMetadata/metadataStream.ts index 247d81c4c3..0940c76597 100644 --- a/packages/core/src/v3/runMetadata/metadataStream.ts +++ b/packages/core/src/v3/runMetadata/metadataStream.ts @@ -63,8 +63,6 @@ export class MetadataStream { cancel: () => this.controller.abort(), }); - console.log("Posting server stream to ", this.options.baseUrl); - 
return fetch( `${this.options.baseUrl}/realtime/v1/streams/${this.options.runId}/${this.options.key}`, { diff --git a/packages/core/src/v3/runMetadata/types.ts b/packages/core/src/v3/runMetadata/types.ts index 6e7ece8fef..cfb7841c66 100644 --- a/packages/core/src/v3/runMetadata/types.ts +++ b/packages/core/src/v3/runMetadata/types.ts @@ -13,5 +13,9 @@ export interface RunMetadataManager { decrementKey(key: string, value: number): void; update(metadata: Record): void; flush(requestOptions?: ApiRequestOptions): Promise; - stream(key: string, value: AsyncIterable, signal?: AbortSignal): Promise>; + stream( + key: string, + value: AsyncIterable | ReadableStream, + signal?: AbortSignal + ): Promise>; } diff --git a/packages/rsc/CHANGELOG.md b/packages/rsc/CHANGELOG.md new file mode 100644 index 0000000000..8f9015f8e6 --- /dev/null +++ b/packages/rsc/CHANGELOG.md @@ -0,0 +1 @@ +# @trigger.dev/rsc diff --git a/packages/rsc/README.md b/packages/rsc/README.md new file mode 100644 index 0000000000..36865313f6 --- /dev/null +++ b/packages/rsc/README.md @@ -0,0 +1 @@ +## trigger.dev rsc diff --git a/packages/rsc/package.json b/packages/rsc/package.json new file mode 100644 index 0000000000..95083deef2 --- /dev/null +++ b/packages/rsc/package.json @@ -0,0 +1,76 @@ +{ + "name": "@trigger.dev/rsc", + "version": "3.1.2", + "description": "trigger.dev rsc", + "license": "MIT", + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "https://github.com/triggerdotdev/trigger.dev", + "directory": "packages/react-hooks" + }, + "type": "module", + "files": [ + "dist" + ], + "tshy": { + "selfLink": false, + "main": true, + "module": true, + "project": "./tsconfig.json", + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + }, + "sourceDialects": [ + "@triggerdotdev/source" + ] + }, + "scripts": { + "clean": "rimraf dist", + "build": "tshy && pnpm run update-version", + "dev": "tshy --watch", + "typecheck": "tsc --noEmit", + "update-version": "tsx ../../scripts/updateVersion.ts", + "check-exports": "attw --pack ." 
+ }, + "dependencies": { + "@trigger.dev/core": "workspace:^3.1.2", + "mlly": "^1.7.1", + "react": "19.0.0-rc.1", + "react-dom": "19.0.0-rc.1" + }, + "devDependencies": { + "@arethetypeswrong/cli": "^0.15.4", + "@trigger.dev/build": "workspace:^3.1.2", + "@types/node": "^20.14.14", + "@types/react": "*", + "@types/react-dom": "*", + "rimraf": "^3.0.2", + "tshy": "^3.0.2", + "tsx": "4.17.0", + "typescript": "^5.5.4" + }, + "engines": { + "node": ">=18.20.0" + }, + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "@triggerdotdev/source": "./src/index.ts", + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts", + "module": "./dist/esm/index.js" +} diff --git a/packages/rsc/src/build.ts b/packages/rsc/src/build.ts new file mode 100644 index 0000000000..98b811a9c5 --- /dev/null +++ b/packages/rsc/src/build.ts @@ -0,0 +1,108 @@ +import { BuildExtension } from "@trigger.dev/core/v3/build"; +import { sourceDir } from "./sourceDir.js"; + +export function rscExtension(): BuildExtension { + return { + name: "rsc", + onBuildStart(context) { + context.addLayer({ + id: "rsc", + conditions: ["react-server"], + }); + + context.config.build.conditions ??= []; + context.config.build.conditions.push("react-server"); + + context.registerPlugin({ + name: "rsc", + async setup(build) { + const { resolvePathSync: esmResolveSync } = await import("mlly"); + + build.onResolve({ filter: /^react\/jsx-dev-runtime$/ }, (args) => { + context.logger.debug("Resolving jsx-dev-runtime", { args }); + + try { + const resolvedPath = esmResolveSync(args.path, { + url: sourceDir, + conditions: ["react-server"], + }); + + context.logger.debug("Resolved jsx-dev-runtime", { resolvedPath }); + + return { + path: resolvedPath, + }; + } catch (error) { + context.logger.debug("Failed to resolve jsx-dev-runtime", { error }); + } + + return undefined; + }); + + build.onResolve({ filter: /^react\/jsx-runtime$/ }, (args) => { + context.logger.debug("Resolving jsx-runtime", { args }); + + try { + const resolvedPath = esmResolveSync(args.path, { + url: sourceDir, + conditions: ["react-server"], + }); + + context.logger.debug("Resolved jsx-runtime", { resolvedPath }); + + return { + path: resolvedPath, + }; + } catch (error) { + context.logger.debug("Failed to resolve jsx-runtime", { error }); + } + + return undefined; + }); + + build.onResolve({ filter: /^(react|react-dom)$/ }, (args) => { + context.logger.debug("Resolving react", { args }); + + try { + const resolvedPath = esmResolveSync(args.path, { + url: sourceDir, + conditions: ["react-server"], + }); + + context.logger.debug("Resolved react", { resolvedPath }); + + return { + path: resolvedPath, + }; + } catch (error) { + context.logger.debug("Failed to resolve react", { error }); + } + + return undefined; + }); + + build.onResolve({ filter: /^react-dom\/server$/ }, (args) => { + context.logger.debug("Resolving react-dom/server", { args }); + + try { + const resolvedPath = esmResolveSync(args.path, { + url: sourceDir, + conditions: ["worker"], + }); + + context.logger.debug("Resolved react-dom/server", { resolvedPath }); + + return { + path: resolvedPath, + }; + } catch (error) { + context.logger.debug("Failed to resolve react-dom/server", { error }); + } + + return undefined; + }); + }, + }); + }, + }; +} diff --git a/packages/rsc/src/index.ts 
b/packages/rsc/src/index.ts new file mode 100644 index 0000000000..11739b46fa --- /dev/null +++ b/packages/rsc/src/index.ts @@ -0,0 +1 @@ +export * from "./build.js"; diff --git a/packages/rsc/src/sourceDir-cjs.cts b/packages/rsc/src/sourceDir-cjs.cts new file mode 100644 index 0000000000..8b3e67e6a7 --- /dev/null +++ b/packages/rsc/src/sourceDir-cjs.cts @@ -0,0 +1,3 @@ +import { pathToFileURL } from "node:url"; +//@ts-ignore - Have to ignore because TSC thinks this is ESM +export const sourceDir = pathToFileURL(__dirname).toString(); diff --git a/packages/rsc/src/sourceDir.ts b/packages/rsc/src/sourceDir.ts new file mode 100644 index 0000000000..85489ac497 --- /dev/null +++ b/packages/rsc/src/sourceDir.ts @@ -0,0 +1,3 @@ +import { fileURLToPath } from "node:url"; +//@ts-ignore +export const sourceDir = fileURLToPath(new URL(".", import.meta.url)); diff --git a/packages/rsc/tsconfig.json b/packages/rsc/tsconfig.json new file mode 100644 index 0000000000..f73f5bea4b --- /dev/null +++ b/packages/rsc/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../../.configs/tsconfig.base.json", + "compilerOptions": { + "isolatedDeclarations": false, + "composite": true, + "sourceMap": true, + "stripInternal": true + }, + "include": ["./src/**/*.ts", "./src/**/*.tsx"] +} diff --git a/packages/trigger-sdk/src/v3/metadata.ts b/packages/trigger-sdk/src/v3/metadata.ts index 334dafc6bb..f35131d2f7 100644 --- a/packages/trigger-sdk/src/v3/metadata.ts +++ b/packages/trigger-sdk/src/v3/metadata.ts @@ -142,7 +142,7 @@ async function flushMetadata(requestOptions?: ApiRequestOptions): Promise async function stream( key: string, - value: AsyncIterable, + value: AsyncIterable | ReadableStream, signal?: AbortSignal ): Promise> { return runMetadata.stream(key, value, signal); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index a690166fc7..23bc47fa98 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1430,6 +1430,49 @@ importers: specifier: ^5.5.4 version: 5.5.4 + packages/rsc: + dependencies: + '@trigger.dev/core': + specifier: workspace:^3.1.2 + version: link:../core + mlly: + specifier: ^1.7.1 + version: 1.7.1 + react: + specifier: 19.0.0-rc.1 + version: 19.0.0-rc.1 + react-dom: + specifier: 19.0.0-rc.1 + version: 19.0.0-rc.1(react@19.0.0-rc.1) + devDependencies: + '@arethetypeswrong/cli': + specifier: ^0.15.4 + version: 0.15.4 + '@trigger.dev/build': + specifier: workspace:^3.1.2 + version: link:../build + '@types/node': + specifier: ^20.14.14 + version: 20.14.14 + '@types/react': + specifier: '*' + version: 18.3.1 + '@types/react-dom': + specifier: '*' + version: 18.2.7 + rimraf: + specifier: ^3.0.2 + version: 3.0.2 + tshy: + specifier: ^3.0.2 + version: 3.0.2 + tsx: + specifier: 4.17.0 + version: 4.17.0 + typescript: + specifier: ^5.5.4 + version: 5.5.4 + packages/trigger-sdk: dependencies: '@opentelemetry/api': @@ -1628,6 +1671,9 @@ importers: '@next/bundle-analyzer': specifier: ^15.0.2 version: 15.0.2 + '@trigger.dev/rsc': + specifier: workspace:^3 + version: link:../../packages/rsc '@types/node': specifier: ^20 version: 20.14.14 @@ -16439,14 +16485,6 @@ packages: dependencies: acorn: 8.12.1 - /acorn-jsx@5.3.2(acorn@8.10.0): - resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} - peerDependencies: - acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 - dependencies: - acorn: 8.10.0 - dev: true - /acorn-jsx@5.3.2(acorn@8.12.1): resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} 
peerDependencies: @@ -20366,8 +20404,8 @@ packages: resolution: {integrity: sha512-XwctdmTO6SIvCzd9810yyNzIrOrqNYV9Koizx4C/mRhf9uq0o4yHoCEU/670pOxOL/MSraektvSAji79kX90Vg==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} dependencies: - acorn: 8.10.0 - acorn-jsx: 5.3.2(acorn@8.10.0) + acorn: 8.12.1 + acorn-jsx: 5.3.2(acorn@8.12.1) eslint-visitor-keys: 3.4.2 dev: true @@ -26249,6 +26287,15 @@ packages: scheduler: 0.23.0 dev: false + /react-dom@19.0.0-rc.1(react@19.0.0-rc.1): + resolution: {integrity: sha512-k8MfDX+4G+eaa1cXXI9QF4d+pQtYol3nx8vauqRWUEOPqC7NQn2qmEqUsLoSd28rrZUL+R3T2VC+kZ2Hyx1geQ==} + peerDependencies: + react: 19.0.0-rc.1 + dependencies: + react: 19.0.0-rc.1 + scheduler: 0.25.0-rc.1 + dev: false + /react-email@2.1.2(eslint@8.45.0): resolution: {integrity: sha512-HBHhpzEE5es9YUoo7VSj6qy1omjwndxf3/Sb44UJm/uJ2AjmqALo2yryux0CjW9QAVfitc9rxHkLvIb9H87QQw==} engines: {node: '>=18.0.0'} @@ -26663,6 +26710,11 @@ packages: engines: {node: '>=0.10.0'} dev: false + /react@19.0.0-rc.1: + resolution: {integrity: sha512-NZKln+uyPuyHchzP07I6GGYFxdAoaKhehgpCa3ltJGzwE31OYumLeshGaitA1R/fS5d9D2qpZVwTFAr6zCLM9w==} + engines: {node: '>=0.10.0'} + dev: false + /read-cache@1.0.0: resolution: {integrity: sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==} dependencies: @@ -27410,6 +27462,10 @@ packages: dependencies: loose-envify: 1.4.0 + /scheduler@0.25.0-rc.1: + resolution: {integrity: sha512-fVinv2lXqYpKConAMdergOl5owd0rY1O4P/QTe0aWKCqGtu7VsCt1iqQFxSJtqK4Lci/upVSBpGwVC7eWcuS9Q==} + dev: false + /schema-utils@3.3.0: resolution: {integrity: sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg==} engines: {node: '>= 10.13.0'} diff --git a/references/nextjs-realtime/package.json b/references/nextjs-realtime/package.json index 8dc7c14507..0c05f0a93b 100644 --- a/references/nextjs-realtime/package.json +++ b/references/nextjs-realtime/package.json @@ -40,6 +40,7 @@ "postcss": "^8", "tailwindcss": "^3.4.1", "trigger.dev": "workspace:^3", + "@trigger.dev/rsc": "workspace:^3", "typescript": "^5" } } \ No newline at end of file diff --git a/references/nextjs-realtime/src/app/page.tsx b/references/nextjs-realtime/src/app/page.tsx index 848dd6f50b..0088e55962 100644 --- a/references/nextjs-realtime/src/app/page.tsx +++ b/references/nextjs-realtime/src/app/page.tsx @@ -14,6 +14,16 @@ export default async function Home() { }, }); + const readAll = await auth.createPublicToken({ + scopes: { + read: { + runs: true, + }, + }, + }); + + console.log({ publicAccessToken, readAll }); + return (
diff --git a/references/nextjs-realtime/src/trigger/rsc.tsx b/references/nextjs-realtime/src/trigger/rsc.tsx new file mode 100644 index 0000000000..620b6752db --- /dev/null +++ b/references/nextjs-realtime/src/trigger/rsc.tsx @@ -0,0 +1,112 @@ +import { openai } from "@ai-sdk/openai"; +import { logger, metadata, schemaTask } from "@trigger.dev/sdk/v3"; +import { streamUI } from "ai/rsc"; +import { z } from "zod"; +import { createStreamableUI } from "ai/rsc"; +import { renderToReadableStream } from "react-dom/server"; + +const LoadingComponent = () =>
<div>getting weather...</div>
; + +const getWeather = async (location: string) => { + await new Promise((resolve) => setTimeout(resolve, 2000)); + return "82°F️ ☀️"; +}; + +interface WeatherProps { + location: string; + weather: string; +} + +const WeatherComponent = (props: WeatherProps) => ( +
<div> + The weather in {props.location} is {props.weather} + </div>
+); + +export const openaiStreamingRSC = schemaTask({ + id: "openai-streaming-rsc", + description: "Stream RSC data from OpenAI to get the weather", + schema: z.object({ + model: z.string().default("chatgpt-4o-latest"), + prompt: z.string().default("Hello, how are you?"), + }), + run: async ({ model, prompt }) => { + logger.info("Running OpenAI model", { model, prompt }); + + const result = await streamUI({ + model: openai(model), + prompt, + text: ({ content }) =>
<div>{content}</div>
, + tools: { + getWeather: { + description: "Get the weather for a location", + parameters: z.object({ location: z.string() }), + generate: async function* ({ location }) { + yield ; + const weather = await getWeather(location); + return ; + }, + }, + }, + }); + + const stream = await metadata.stream("openai", result.stream); + + let text = ""; + + for await (const chunk of stream) { + logger.log("Received chunk", { chunk }); + + if (chunk.type === "text-delta") { + text += chunk.textDelta; + } + } + + return { text, value: result.value }; + }, +}); + +function App() { + return ( + + + + + + My app + + + + ); +} + +export const weatherUI = schemaTask({ + id: "weather-ui", + description: "Stream weather UI data from this task to the client", + schema: z.object({ + message: z.string(), + }), + run: async ({ message }) => { + const weatherUI = createStreamableUI(); + + weatherUI.update(
<div>Loading...</div>
); + + setTimeout(() => { + weatherUI.done(
<div>{message}</div>
); + }, 1000); + + const readableStream = await renderToReadableStream(, { + onError(error, errorInfo) { + logger.error("Error rendering UI", { error, errorInfo }); + }, + }); + + const stream = await metadata.stream("weather-ui", readableStream); + + for await (const chunk of stream) { + logger.log("Received chunk", { chunk }); + } + + return weatherUI.value; + }, +}); diff --git a/references/nextjs-realtime/trigger.config.ts b/references/nextjs-realtime/trigger.config.ts index 9820fb2223..502be8ad49 100644 --- a/references/nextjs-realtime/trigger.config.ts +++ b/references/nextjs-realtime/trigger.config.ts @@ -1,6 +1,10 @@ import { defineConfig } from "@trigger.dev/sdk/v3"; +import { rscExtension } from "@trigger.dev/rsc"; export default defineConfig({ project: "proj_bzhdaqhlymtuhlrcgbqy", dirs: ["./src/trigger"], + build: { + extensions: [rscExtension()], + }, }); From 95fb46f00e5e044b73451f86f1c9d4de3e8aa6d4 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Mon, 18 Nov 2024 13:35:48 +0000 Subject: [PATCH 20/31] Various fixes and accepted recommendations by CodeRabbit --- apps/webapp/app/routes/api.v1.packets.$.ts | 2 +- .../api.v1.projects.$projectRef.runs.ts | 2 +- apps/webapp/app/routes/api.v1.runs.ts | 2 +- .../routes/api.v1.tasks.$taskId.trigger.ts | 2 +- apps/webapp/app/routes/api.v3.runs.$runId.ts | 2 +- .../routes/realtime.v1.batches.$batchId.ts | 2 +- .../app/routes/realtime.v1.runs.$runId.ts | 2 +- apps/webapp/app/routes/realtime.v1.runs.ts | 2 +- .../realtime.v1.streams.$runId.$streamId.ts | 2 +- .../apiBuilder.server.ts | 13 ++++++- packages/core/src/v3/runMetadata/manager.ts | 34 ++++++++++------- .../core/src/v3/runMetadata/metadataStream.ts | 2 - .../react-hooks/src/hooks/useApiClient.ts | 4 +- packages/react-hooks/src/hooks/useRealtime.ts | 24 ++++++++++-- .../react-hooks/src/hooks/useTaskTrigger.ts | 10 ++++- packages/rsc/package.json | 2 +- packages/trigger-sdk/src/v3/metadata.ts | 38 ++++++++++++++++++- packages/trigger-sdk/src/v3/shared.ts | 3 +- 18 files changed, 110 insertions(+), 38 deletions(-) rename apps/webapp/app/services/{routeBuiilders => routeBuilders}/apiBuilder.server.ts (97%) diff --git a/apps/webapp/app/routes/api.v1.packets.$.ts b/apps/webapp/app/routes/api.v1.packets.$.ts index d88773d941..4cbf76c6d3 100644 --- a/apps/webapp/app/routes/api.v1.packets.$.ts +++ b/apps/webapp/app/routes/api.v1.packets.$.ts @@ -2,7 +2,7 @@ import type { ActionFunctionArgs } from "@remix-run/server-runtime"; import { json } from "@remix-run/server-runtime"; import { z } from "zod"; import { authenticateApiRequest } from "~/services/apiAuth.server"; -import { createLoaderApiRoute } from "~/services/routeBuiilders/apiBuilder.server"; +import { createLoaderApiRoute } from "~/services/routeBuilders/apiBuilder.server"; import { generatePresignedUrl } from "~/v3/r2.server"; const ParamsSchema = z.object({ diff --git a/apps/webapp/app/routes/api.v1.projects.$projectRef.runs.ts b/apps/webapp/app/routes/api.v1.projects.$projectRef.runs.ts index 1a9d850a98..fa7375a85c 100644 --- a/apps/webapp/app/routes/api.v1.projects.$projectRef.runs.ts +++ b/apps/webapp/app/routes/api.v1.projects.$projectRef.runs.ts @@ -5,7 +5,7 @@ import { ApiRunListPresenter, ApiRunListSearchParams, } from "~/presenters/v3/ApiRunListPresenter.server"; -import { createLoaderPATApiRoute } from "~/services/routeBuiilders/apiBuilder.server"; +import { createLoaderPATApiRoute } from "~/services/routeBuilders/apiBuilder.server"; const ParamsSchema = z.object({ projectRef: z.string(), diff --git 
a/apps/webapp/app/routes/api.v1.runs.ts b/apps/webapp/app/routes/api.v1.runs.ts index 43bc617e2b..8faed5bed2 100644 --- a/apps/webapp/app/routes/api.v1.runs.ts +++ b/apps/webapp/app/routes/api.v1.runs.ts @@ -3,7 +3,7 @@ import { ApiRunListPresenter, ApiRunListSearchParams, } from "~/presenters/v3/ApiRunListPresenter.server"; -import { createLoaderApiRoute } from "~/services/routeBuiilders/apiBuilder.server"; +import { createLoaderApiRoute } from "~/services/routeBuilders/apiBuilder.server"; export const loader = createLoaderApiRoute( { diff --git a/apps/webapp/app/routes/api.v1.tasks.$taskId.trigger.ts b/apps/webapp/app/routes/api.v1.tasks.$taskId.trigger.ts index 8586751062..a88100557a 100644 --- a/apps/webapp/app/routes/api.v1.tasks.$taskId.trigger.ts +++ b/apps/webapp/app/routes/api.v1.tasks.$taskId.trigger.ts @@ -5,7 +5,7 @@ import { z } from "zod"; import { env } from "~/env.server"; import { AuthenticatedEnvironment } from "~/services/apiAuth.server"; import { logger } from "~/services/logger.server"; -import { createActionApiRoute } from "~/services/routeBuiilders/apiBuilder.server"; +import { createActionApiRoute } from "~/services/routeBuilders/apiBuilder.server"; import { ServiceValidationError } from "~/v3/services/baseService.server"; import { OutOfEntitlementError, TriggerTaskService } from "~/v3/services/triggerTask.server"; diff --git a/apps/webapp/app/routes/api.v3.runs.$runId.ts b/apps/webapp/app/routes/api.v3.runs.$runId.ts index f79d8a9233..c0efbd94c8 100644 --- a/apps/webapp/app/routes/api.v3.runs.$runId.ts +++ b/apps/webapp/app/routes/api.v3.runs.$runId.ts @@ -1,7 +1,7 @@ import { json } from "@remix-run/server-runtime"; import { z } from "zod"; import { ApiRetrieveRunPresenter } from "~/presenters/v3/ApiRetrieveRunPresenter.server"; -import { createLoaderApiRoute } from "~/services/routeBuiilders/apiBuilder.server"; +import { createLoaderApiRoute } from "~/services/routeBuilders/apiBuilder.server"; const ParamsSchema = z.object({ runId: z.string(), diff --git a/apps/webapp/app/routes/realtime.v1.batches.$batchId.ts b/apps/webapp/app/routes/realtime.v1.batches.$batchId.ts index 1563af4d67..ac105fddd2 100644 --- a/apps/webapp/app/routes/realtime.v1.batches.$batchId.ts +++ b/apps/webapp/app/routes/realtime.v1.batches.$batchId.ts @@ -2,7 +2,7 @@ import { json } from "@remix-run/server-runtime"; import { z } from "zod"; import { $replica } from "~/db.server"; import { realtimeClient } from "~/services/realtimeClientGlobal.server"; -import { createLoaderApiRoute } from "~/services/routeBuiilders/apiBuilder.server"; +import { createLoaderApiRoute } from "~/services/routeBuilders/apiBuilder.server"; const ParamsSchema = z.object({ batchId: z.string(), diff --git a/apps/webapp/app/routes/realtime.v1.runs.$runId.ts b/apps/webapp/app/routes/realtime.v1.runs.$runId.ts index d2cb44b718..4836063d00 100644 --- a/apps/webapp/app/routes/realtime.v1.runs.$runId.ts +++ b/apps/webapp/app/routes/realtime.v1.runs.$runId.ts @@ -2,7 +2,7 @@ import { json } from "@remix-run/server-runtime"; import { z } from "zod"; import { $replica } from "~/db.server"; import { realtimeClient } from "~/services/realtimeClientGlobal.server"; -import { createLoaderApiRoute } from "~/services/routeBuiilders/apiBuilder.server"; +import { createLoaderApiRoute } from "~/services/routeBuilders/apiBuilder.server"; const ParamsSchema = z.object({ runId: z.string(), diff --git a/apps/webapp/app/routes/realtime.v1.runs.ts b/apps/webapp/app/routes/realtime.v1.runs.ts index b711b230fe..ccb42c0054 100644 --- 
a/apps/webapp/app/routes/realtime.v1.runs.ts +++ b/apps/webapp/app/routes/realtime.v1.runs.ts @@ -1,6 +1,6 @@ import { z } from "zod"; import { realtimeClient } from "~/services/realtimeClientGlobal.server"; -import { createLoaderApiRoute } from "~/services/routeBuiilders/apiBuilder.server"; +import { createLoaderApiRoute } from "~/services/routeBuilders/apiBuilder.server"; const SearchParamsSchema = z.object({ tags: z diff --git a/apps/webapp/app/routes/realtime.v1.streams.$runId.$streamId.ts b/apps/webapp/app/routes/realtime.v1.streams.$runId.$streamId.ts index ffbdad5850..85ac49f61c 100644 --- a/apps/webapp/app/routes/realtime.v1.streams.$runId.$streamId.ts +++ b/apps/webapp/app/routes/realtime.v1.streams.$runId.$streamId.ts @@ -2,7 +2,7 @@ import { ActionFunctionArgs } from "@remix-run/server-runtime"; import { z } from "zod"; import { $replica } from "~/db.server"; import { realtimeStreams } from "~/services/realtimeStreamsGlobal.server"; -import { createLoaderApiRoute } from "~/services/routeBuiilders/apiBuilder.server"; +import { createLoaderApiRoute } from "~/services/routeBuilders/apiBuilder.server"; const ParamsSchema = z.object({ runId: z.string(), diff --git a/apps/webapp/app/services/routeBuiilders/apiBuilder.server.ts b/apps/webapp/app/services/routeBuilders/apiBuilder.server.ts similarity index 97% rename from apps/webapp/app/services/routeBuiilders/apiBuilder.server.ts rename to apps/webapp/app/services/routeBuilders/apiBuilder.server.ts index f278ecba3f..169477a377 100644 --- a/apps/webapp/app/services/routeBuiilders/apiBuilder.server.ts +++ b/apps/webapp/app/services/routeBuilders/apiBuilder.server.ts @@ -13,6 +13,7 @@ import { authenticateApiRequestWithPersonalAccessToken, PersonalAccessTokenAuthenticationResult, } from "../personalAccessToken.server"; +import { safeJsonParse } from "~/utils/json"; type ApiKeyRouteBuilderOptions< TParamsSchema extends z.AnyZodObject | undefined = undefined, @@ -448,7 +449,17 @@ export function createActionApiRoute< ); } - const body = bodySchema.safeParse(JSON.parse(rawBody)); + const rawParsedJson = safeJsonParse(rawBody); + + if (!rawParsedJson) { + return wrapResponse( + request, + json({ error: "Invalid JSON" }, { status: 400 }), + corsStrategy !== "none" + ); + } + + const body = bodySchema.safeParse(rawParsedJson); if (!body.success) { return wrapResponse( request, diff --git a/packages/core/src/v3/runMetadata/manager.ts b/packages/core/src/v3/runMetadata/manager.ts index 45e208687c..5cf4a9daa2 100644 --- a/packages/core/src/v3/runMetadata/manager.ts +++ b/packages/core/src/v3/runMetadata/manager.ts @@ -173,25 +173,31 @@ export class StandardMetadataManager implements RunMetadataManager { return $value; } - // Add the key to the special stream metadata object - this.setKey(`$$stream.${key}`, key); + try { + // Add the key to the special stream metadata object + this.setKey(`$$stream.${key}`, key); - await this.flush(); + await this.flush(); - const streamInstance = new MetadataStream({ - key, - runId: this.runId, - iterator: $value[Symbol.asyncIterator](), - baseUrl: this.streamsBaseUrl, - signal, - }); + const streamInstance = new MetadataStream({ + key, + runId: this.runId, + iterator: $value[Symbol.asyncIterator](), + baseUrl: this.streamsBaseUrl, + signal, + }); - this.activeStreams.set(key, streamInstance); + this.activeStreams.set(key, streamInstance); - // Clean up when stream completes - streamInstance.wait().finally(() => this.activeStreams.delete(key)); + // Clean up when stream completes + 
streamInstance.wait().finally(() => this.activeStreams.delete(key)); - return streamInstance; + return streamInstance; + } catch (error) { + // Clean up metadata key if stream creation fails + this.deleteKey(`$$stream.${key}`); + throw error; + } } public hasActiveStreams(): boolean { diff --git a/packages/core/src/v3/runMetadata/metadataStream.ts b/packages/core/src/v3/runMetadata/metadataStream.ts index 0940c76597..6b4d28c714 100644 --- a/packages/core/src/v3/runMetadata/metadataStream.ts +++ b/packages/core/src/v3/runMetadata/metadataStream.ts @@ -1,5 +1,3 @@ -import { run } from "node:test"; - export type MetadataOptions = { baseUrl: string; runId: string; diff --git a/packages/react-hooks/src/hooks/useApiClient.ts b/packages/react-hooks/src/hooks/useApiClient.ts index 94f4020baf..10914db305 100644 --- a/packages/react-hooks/src/hooks/useApiClient.ts +++ b/packages/react-hooks/src/hooks/useApiClient.ts @@ -12,8 +12,8 @@ export type UseApiClientOptions = { export function useApiClient(options?: UseApiClientOptions): ApiClient { const auth = useTriggerAuthContextOptional(); - const baseUrl = auth?.baseURL ?? options?.baseURL ?? "https://api.trigger.dev"; - const accessToken = auth?.accessToken ?? options?.accessToken; + const baseUrl = options?.baseURL ?? auth?.baseURL ?? "https://api.trigger.dev"; + const accessToken = options?.accessToken ?? auth?.accessToken; if (!accessToken) { throw new Error("Missing accessToken in TriggerAuthContext or useApiClient options"); diff --git a/packages/react-hooks/src/hooks/useRealtime.ts b/packages/react-hooks/src/hooks/useRealtime.ts index 0f859a9981..80237094fd 100644 --- a/packages/react-hooks/src/hooks/useRealtime.ts +++ b/packages/react-hooks/src/hooks/useRealtime.ts @@ -87,6 +87,10 @@ export function useRealtimeRun( } setError(err as Error); + } finally { + if (abortControllerRef.current) { + abortControllerRef.current = null; + } } }, [runId, mutateRun, abortControllerRef, apiClient, setError]); @@ -104,7 +108,7 @@ export function useRealtimeRun( return () => { stop(); }; - }, [runId, stop, options?.enabled]); + }, [runId, stop, options?.enabled, triggerRequest]); return { run, error, stop }; } @@ -208,6 +212,10 @@ export function useRealtimeRunWithStreams< } setError(err as Error); + } finally { + if (abortControllerRef.current) { + abortControllerRef.current = null; + } } }, [runId, mutateRun, mutateStreams, streamsRef, abortControllerRef, apiClient, setError]); @@ -225,7 +233,7 @@ export function useRealtimeRunWithStreams< return () => { stop(); }; - }, [runId, stop, options?.enabled]); + }, [runId, stop, options?.enabled, triggerRequest]); return { run, streams: streams ?? initialStreamsFallback, error, stop }; } @@ -290,6 +298,10 @@ export function useRealtimeRunsWithTag( } setError(err as Error); + } finally { + if (abortControllerRef.current) { + abortControllerRef.current = null; + } } }, [tag, mutateRuns, runsRef, abortControllerRef, apiClient, setError]); @@ -303,7 +315,7 @@ export function useRealtimeRunsWithTag( return () => { stop(); }; - }, [tag, stop, options?.enabled]); + }, [tag, stop, options?.enabled, triggerRequest]); return { runs: runs ?? 
[], error, stop }; } @@ -371,6 +383,10 @@ export function useRealtimeBatch( } setError(err as Error); + } finally { + if (abortControllerRef.current) { + abortControllerRef.current = null; + } } }, [batchId, mutateRuns, runsRef, abortControllerRef, apiClient, setError]); @@ -384,7 +400,7 @@ export function useRealtimeBatch( return () => { stop(); }; - }, [batchId, stop, options?.enabled]); + }, [batchId, stop, options?.enabled, triggerRequest]); return { runs: runs ?? [], error, stop }; } diff --git a/packages/react-hooks/src/hooks/useTaskTrigger.ts b/packages/react-hooks/src/hooks/useTaskTrigger.ts index 8528299461..8d5e50e321 100644 --- a/packages/react-hooks/src/hooks/useTaskTrigger.ts +++ b/packages/react-hooks/src/hooks/useTaskTrigger.ts @@ -23,6 +23,7 @@ export interface TriggerInstance { submit: (payload: TaskPayload) => void; isLoading: boolean; handle?: RunHandleFromTypes>; + error?: Error; } export type UseTaskTriggerOptions = UseApiClientOptions; @@ -69,6 +70,7 @@ export function useTaskTrigger( }, isLoading: mutation.isMutating, handle: mutation.data as RunHandleFromTypes>, + error: mutation.error, }; } @@ -124,7 +126,11 @@ export function useRealtimeTaskTrigger( }); return { - ...realtimeInstance, - ...triggerInstance, + submit: triggerInstance.submit, + isLoading: triggerInstance.isLoading, + handle: triggerInstance.handle, + run: realtimeInstance.run, + error: realtimeInstance.error ?? triggerInstance.error, + stop: realtimeInstance.stop, }; } diff --git a/packages/rsc/package.json b/packages/rsc/package.json index 95083deef2..54b003a08d 100644 --- a/packages/rsc/package.json +++ b/packages/rsc/package.json @@ -9,7 +9,7 @@ "repository": { "type": "git", "url": "https://github.com/triggerdotdev/trigger.dev", - "directory": "packages/react-hooks" + "directory": "packages/rsc" }, "type": "module", "files": [ diff --git a/packages/trigger-sdk/src/v3/metadata.ts b/packages/trigger-sdk/src/v3/metadata.ts index f35131d2f7..94cf5de15e 100644 --- a/packages/trigger-sdk/src/v3/metadata.ts +++ b/packages/trigger-sdk/src/v3/metadata.ts @@ -109,14 +109,48 @@ function saveMetadata(metadata: RunMetadata): void { runMetadata.update(metadata); } -function incrementMetadataKey(key: string, value: number) { +/** + * Increments a numeric value in the metadata of the current run by the specified amount. + * This function allows you to atomically increment a numeric metadata value. + * + * @param {string} key - The key of the numeric value to increment. + * @param {number} value - The amount to increment the value by. + * + * @example + * metadata.increment("counter", 1); // Increments counter by 1 + * metadata.increment("score", 10); // Increments score by 10 + */ +function incrementMetadataKey(key: string, value: number = 1) { runMetadata.incrementKey(key, value); } -function decrementMetadataKey(key: string, value: number) { +/** + * Decrements a numeric value in the metadata of the current run by the specified amount. + * This function allows you to atomically decrement a numeric metadata value. + * + * @param {string} key - The key of the numeric value to decrement. + * @param {number} value - The amount to decrement the value by. + * + * @example + * metadata.decrement("counter", 1); // Decrements counter by 1 + * metadata.decrement("score", 5); // Decrements score by 5 + */ +function decrementMetadataKey(key: string, value: number = 1) { runMetadata.decrementKey(key, value); } +/** + * Appends a value to an array in the metadata of the current run. 
+ * If the key doesn't exist, it creates a new array with the value. + * If the key exists but isn't an array, it converts the existing value to an array. + * + * @param {string} key - The key of the array in metadata. + * @param {DeserializedJson} value - The value to append to the array. + * + * @example + * metadata.append("logs", "User logged in"); + * metadata.append("events", { type: "click", timestamp: Date.now() }); + */ function appendMetadataKey(key: string, value: DeserializedJson) { runMetadata.appendKey(key, value); } diff --git a/packages/trigger-sdk/src/v3/shared.ts b/packages/trigger-sdk/src/v3/shared.ts index 31185de19d..1c1b43cb2f 100644 --- a/packages/trigger-sdk/src/v3/shared.ts +++ b/packages/trigger-sdk/src/v3/shared.ts @@ -44,6 +44,7 @@ import type { BatchRunHandleFromTypes, InferRunTypes, inferSchemaIn, + inferToolParameters, RetrieveRunResult, RunHandle, RunHandleFromTypes, @@ -231,7 +232,7 @@ export function createToolTask< tool: { parameters: params.parameters, description: params.description, - execute: async (args: any) => { + execute: async (args: inferToolParameters) => { return task.triggerAndWait(args).unwrap(); }, }, From 485ac329e0ce61502a42d9f7e58a0c6bbdc98160 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Mon, 18 Nov 2024 13:45:00 +0000 Subject: [PATCH 21/31] Regenerate pnpm lock file --- pnpm-lock.yaml | 3 --- 1 file changed, 3 deletions(-) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 23bc47fa98..19b844cccf 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1401,9 +1401,6 @@ importers: swr: specifier: ^2.2.5 version: 2.2.5(react@18.3.1) - throttleit: - specifier: ^2.1.0 - version: 2.1.0 devDependencies: '@arethetypeswrong/cli': specifier: ^0.15.4 From e192f731e4244e2d8e2e30531b40bc7c5edc4ac4 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Mon, 18 Nov 2024 15:11:23 +0000 Subject: [PATCH 22/31] A couple tweaks to rsc and give up on rendering react in tasks for now --- .vscode/launch.json | 8 ++++++ packages/react-hooks/package.json | 2 +- packages/rsc/src/build.ts | 24 +++++++++++----- references/nextjs-realtime/package.json | 2 +- .../nextjs-realtime/src/trigger/rsc.tsx | 28 +++++++------------ references/nextjs-realtime/trigger.config.ts | 2 +- 6 files changed, 38 insertions(+), 28 deletions(-) diff --git a/.vscode/launch.json b/.vscode/launch.json index 07f723b6b7..f0736642d3 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -45,6 +45,14 @@ "cwd": "${workspaceFolder}/references/v3-catalog", "sourceMaps": true }, + { + "type": "node-terminal", + "request": "launch", + "name": "Debug Dev Next.js Realtime", + "command": "pnpm exec trigger dev", + "cwd": "${workspaceFolder}/references/nextjs-realtime", + "sourceMaps": true + }, { "type": "node-terminal", "request": "launch", diff --git a/packages/react-hooks/package.json b/packages/react-hooks/package.json index d6050309a0..ab5de1622a 100644 --- a/packages/react-hooks/package.json +++ b/packages/react-hooks/package.json @@ -74,4 +74,4 @@ "main": "./dist/commonjs/index.js", "types": "./dist/commonjs/index.d.ts", "module": "./dist/esm/index.js" -} \ No newline at end of file +} diff --git a/packages/rsc/src/build.ts b/packages/rsc/src/build.ts index 98b811a9c5..5436bf6fb0 100644 --- a/packages/rsc/src/build.ts +++ b/packages/rsc/src/build.ts @@ -1,7 +1,12 @@ import { BuildExtension } from "@trigger.dev/core/v3/build"; import { sourceDir } from "./sourceDir.js"; -export function rscExtension(): BuildExtension { +export type RSCExtensionOptions = { + resolveDir?: string; + 
reactDomEnvironment?: "node" | "worker" | "bun"; +}; + +export function rscExtension(options?: RSCExtensionOptions): BuildExtension { return { name: "rsc", onBuildStart(context) { @@ -10,6 +15,8 @@ export function rscExtension(): BuildExtension { conditions: ["react-server"], }); + const srcDir = options?.resolveDir ?? sourceDir; + context.config.build.conditions ??= []; context.config.build.conditions.push("react-server"); @@ -23,7 +30,7 @@ export function rscExtension(): BuildExtension { try { const resolvedPath = esmResolveSync(args.path, { - url: sourceDir, + url: srcDir, conditions: ["react-server"], }); @@ -44,7 +51,7 @@ export function rscExtension(): BuildExtension { try { const resolvedPath = esmResolveSync(args.path, { - url: sourceDir, + url: srcDir, conditions: ["react-server"], }); @@ -65,7 +72,7 @@ export function rscExtension(): BuildExtension { try { const resolvedPath = esmResolveSync(args.path, { - url: sourceDir, + url: srcDir, conditions: ["react-server"], }); @@ -82,12 +89,15 @@ export function rscExtension(): BuildExtension { }); build.onResolve({ filter: /^react-dom\/server$/ }, (args) => { - context.logger.debug("Resolving react-dom/server", { args }); + const condition = + context.config.runtime === "bun" ? "bun" : options?.reactDomEnvironment ?? "node"; + + context.logger.debug("Resolving react-dom/server", { args, condition }); try { const resolvedPath = esmResolveSync(args.path, { - url: sourceDir, - conditions: ["worker"], + url: srcDir, + conditions: [condition], }); context.logger.debug("Resolved react-dom/server", { resolvedPath }); diff --git a/references/nextjs-realtime/package.json b/references/nextjs-realtime/package.json index 0c05f0a93b..7e2c7257d4 100644 --- a/references/nextjs-realtime/package.json +++ b/references/nextjs-realtime/package.json @@ -34,13 +34,13 @@ }, "devDependencies": { "@next/bundle-analyzer": "^15.0.2", + "@trigger.dev/rsc": "workspace:^3", "@types/node": "^20", "@types/react": "^18", "@types/react-dom": "^18", "postcss": "^8", "tailwindcss": "^3.4.1", "trigger.dev": "workspace:^3", - "@trigger.dev/rsc": "workspace:^3", "typescript": "^5" } } \ No newline at end of file diff --git a/references/nextjs-realtime/src/trigger/rsc.tsx b/references/nextjs-realtime/src/trigger/rsc.tsx index 620b6752db..39d570ad86 100644 --- a/references/nextjs-realtime/src/trigger/rsc.tsx +++ b/references/nextjs-realtime/src/trigger/rsc.tsx @@ -1,9 +1,8 @@ import { openai } from "@ai-sdk/openai"; import { logger, metadata, schemaTask } from "@trigger.dev/sdk/v3"; import { streamUI } from "ai/rsc"; -import { z } from "zod"; -import { createStreamableUI } from "ai/rsc"; import { renderToReadableStream } from "react-dom/server"; +import { z } from "zod"; const LoadingComponent = () =>
<div>getting weather...</div>
; @@ -72,7 +71,6 @@ function App() { - My app @@ -87,26 +85,20 @@ export const weatherUI = schemaTask({ message: z.string(), }), run: async ({ message }) => { - const weatherUI = createStreamableUI(); + logger.info("Running weather UI", { message }); - weatherUI.update(
<div>Loading...</div>
); + const readableStream = await renderToReadableStream(); - setTimeout(() => { - weatherUI.done(
<div>{message}</div>
); - }, 1000); + const reader = readableStream.getReader(); - const readableStream = await renderToReadableStream(, { - onError(error, errorInfo) { - logger.error("Error rendering UI", { error, errorInfo }); - }, - }); + while (true) { + const { done, value } = await reader.read(); - const stream = await metadata.stream("weather-ui", readableStream); + if (done) { + break; + } - for await (const chunk of stream) { - logger.log("Received chunk", { chunk }); + logger.log("Received chunk", { value }); } - - return weatherUI.value; }, }); diff --git a/references/nextjs-realtime/trigger.config.ts b/references/nextjs-realtime/trigger.config.ts index 502be8ad49..96a51a6b94 100644 --- a/references/nextjs-realtime/trigger.config.ts +++ b/references/nextjs-realtime/trigger.config.ts @@ -5,6 +5,6 @@ export default defineConfig({ project: "proj_bzhdaqhlymtuhlrcgbqy", dirs: ["./src/trigger"], build: { - extensions: [rscExtension()], + extensions: [rscExtension({ reactDomEnvironment: "worker" })], }, }); From 227acc17912200961127c0f106a2e8ef9adb84e6 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Mon, 18 Nov 2024 15:12:11 +0000 Subject: [PATCH 23/31] Add changeset --- .changeset/swift-glasses-mate.md | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 .changeset/swift-glasses-mate.md diff --git a/.changeset/swift-glasses-mate.md b/.changeset/swift-glasses-mate.md new file mode 100644 index 0000000000..9fbf206e9e --- /dev/null +++ b/.changeset/swift-glasses-mate.md @@ -0,0 +1,10 @@ +--- +"@trigger.dev/react-hooks": patch +"@trigger.dev/sdk": patch +"trigger.dev": patch +"@trigger.dev/build": patch +"@trigger.dev/core": patch +"@trigger.dev/rsc": patch +--- + +Realtime streams From 9e089871d712a3cc800d207be284dcad3bd55340 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Mon, 18 Nov 2024 15:29:35 +0000 Subject: [PATCH 24/31] Remove triggerRequest from the useEffect deps --- packages/react-hooks/src/hooks/useRealtime.ts | 8 +++--- .../batches/[id]/ClientBatchRunDetails.tsx | 26 ++++++++++--------- 2 files changed, 18 insertions(+), 16 deletions(-) diff --git a/packages/react-hooks/src/hooks/useRealtime.ts b/packages/react-hooks/src/hooks/useRealtime.ts index 80237094fd..20d96020d5 100644 --- a/packages/react-hooks/src/hooks/useRealtime.ts +++ b/packages/react-hooks/src/hooks/useRealtime.ts @@ -108,7 +108,7 @@ export function useRealtimeRun( return () => { stop(); }; - }, [runId, stop, options?.enabled, triggerRequest]); + }, [runId, stop, options?.enabled]); return { run, error, stop }; } @@ -233,7 +233,7 @@ export function useRealtimeRunWithStreams< return () => { stop(); }; - }, [runId, stop, options?.enabled, triggerRequest]); + }, [runId, stop, options?.enabled]); return { run, streams: streams ?? initialStreamsFallback, error, stop }; } @@ -315,7 +315,7 @@ export function useRealtimeRunsWithTag( return () => { stop(); }; - }, [tag, stop, options?.enabled, triggerRequest]); + }, [tag, stop, options?.enabled]); return { runs: runs ?? [], error, stop }; } @@ -400,7 +400,7 @@ export function useRealtimeBatch( return () => { stop(); }; - }, [batchId, stop, options?.enabled, triggerRequest]); + }, [batchId, stop, options?.enabled]); return { runs: runs ?? 
[], error, stop }; } diff --git a/references/nextjs-realtime/src/app/batches/[id]/ClientBatchRunDetails.tsx b/references/nextjs-realtime/src/app/batches/[id]/ClientBatchRunDetails.tsx index 1f0b63f8c9..fbcc934746 100644 --- a/references/nextjs-realtime/src/app/batches/[id]/ClientBatchRunDetails.tsx +++ b/references/nextjs-realtime/src/app/batches/[id]/ClientBatchRunDetails.tsx @@ -1,20 +1,19 @@ "use client"; import { Card, CardContent } from "@/components/ui/card"; -import { TriggerAuthContext, useRealtimeBatch } from "@trigger.dev/react-hooks"; import type { exampleTask } from "@/trigger/example"; +import { useRealtimeBatch } from "@trigger.dev/react-hooks"; import { Badge } from "@/components/ui/badge"; import { Table, TableBody, - TableCaption, TableCell, TableHead, TableHeader, TableRow, } from "@/components/ui/table"; -import { TaskRunShape, AnyRunShape } from "@trigger.dev/sdk/v3"; +import { AnyRunShape, TaskRunShape } from "@trigger.dev/sdk/v3"; import { z } from "zod"; const MetadataSchema = z.object({ @@ -116,8 +115,17 @@ export function BackgroundRunsTable({ runs }: { runs: TaskRunShape(batchId); +function BatchRunTableWrapper({ + batchId, + publicAccessToken, +}: { + batchId: string; + publicAccessToken: string; +}) { + const { runs, error } = useRealtimeBatch(batchId, { + accessToken: publicAccessToken, + baseURL: process.env.NEXT_PUBLIC_TRIGGER_API_URL, + }); console.log(runs); @@ -141,11 +149,5 @@ function BatchRunTableWrapper({ batchId }: { batchId: string }) { } export default function ClientBatchRunDetails({ batchId, jwt }: { batchId: string; jwt: string }) { - return ( - - - - ); + return ; } From f7875aef3d07312305cdab6e94989ef3a0acf62c Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Tue, 19 Nov 2024 09:37:57 +0000 Subject: [PATCH 25/31] Improve realtime & frontend authentication errors --- .../app/services/authorization.server.ts | 34 +- .../app/services/realtime/jwtAuth.server.ts | 42 +- .../routeBuilders/apiBuilder.server.ts | 452 +++++++++--------- apps/webapp/test/authorization.test.ts | 323 ++++++++----- packages/core/src/v3/apiClient/stream.ts | 29 +- packages/core/src/v3/jwt.ts | 35 +- .../src/app/runs/[id]/ClientRunDetails.tsx | 18 +- 7 files changed, 558 insertions(+), 375 deletions(-) diff --git a/apps/webapp/app/services/authorization.server.ts b/apps/webapp/app/services/authorization.server.ts index 5baff68c31..66f89e9fcc 100644 --- a/apps/webapp/app/services/authorization.server.ts +++ b/apps/webapp/app/services/authorization.server.ts @@ -35,36 +35,45 @@ export type AuthorizationEntity = { * checkAuthorization(entity, "read", { tasks: ["task_5678"] }); // Returns true * ``` */ +export type AuthorizationResult = { authorized: true } | { authorized: false; reason: string }; + +/** + * Checks if the given entity is authorized to perform a specific action on a resource. 
+ */ export function checkAuthorization( entity: AuthorizationEntity, action: AuthorizationAction, resource: AuthorizationResources, superScopes?: string[] -) { +): AuthorizationResult { // "PRIVATE" is a secret key and has access to everything if (entity.type === "PRIVATE") { - return true; + return { authorized: true }; } // "PUBLIC" is a deprecated key and has no access if (entity.type === "PUBLIC") { - return false; + return { authorized: false, reason: "PUBLIC type is deprecated and has no access" }; } // If the entity has no permissions, deny access if (!entity.scopes || entity.scopes.length === 0) { - return false; + return { + authorized: false, + reason: + "Public Access Token has no permissions. See https://trigger.dev/docs/frontend/overview#authentication for more information.", + }; } // If the resource object is empty, deny access if (Object.keys(resource).length === 0) { - return false; + return { authorized: false, reason: "Resource object is empty" }; } // Check for any of the super scopes if (superScopes && superScopes.length > 0) { if (superScopes.some((permission) => entity.scopes?.includes(permission))) { - return true; + return { authorized: true }; } } @@ -94,10 +103,19 @@ export function checkAuthorization( // If any resource is not authorized, return false if (!resourceAuthorized) { - return false; + return { + authorized: false, + reason: `Public Access Token is missing required permissions. Permissions required for ${resourceValues + .map((v) => `'${action}:${resourceType}:${v}'`) + .join(", ")} but token has the following permissions: ${entity.scopes + .map((s) => `'${s}'`) + .join( + ", " + )}. See https://trigger.dev/docs/frontend/overview#authentication for more information.`, + }; } } // All resources are authorized - return true; + return { authorized: true }; } diff --git a/apps/webapp/app/services/realtime/jwtAuth.server.ts b/apps/webapp/app/services/realtime/jwtAuth.server.ts index 490e2d2adb..4884e85fd4 100644 --- a/apps/webapp/app/services/realtime/jwtAuth.server.ts +++ b/apps/webapp/app/services/realtime/jwtAuth.server.ts @@ -1,3 +1,4 @@ +import { json } from "@remix-run/server-runtime"; import { validateJWT } from "@trigger.dev/core/v3/jwt"; import { findEnvironmentById } from "~/models/runtimeEnvironment.server"; @@ -9,24 +10,51 @@ export async function validatePublicJwtKey(token: string) { const sub = extractJWTSub(token); if (!sub) { - return; + throw json({ error: "Invalid Public Access Token, missing subject." }, { status: 401 }); } const environment = await findEnvironmentById(sub); if (!environment) { - return; + throw json({ error: "Invalid Public Access Token, environment not found." }, { status: 401 }); } - const claims = await validateJWT(token, environment.apiKey); - - if (!claims) { - return; + const result = await validateJWT(token, environment.apiKey); + + if (!result.ok) { + switch (result.code) { + case "ERR_JWT_EXPIRED": { + throw json( + { + error: + "Public Access Token has expired. See https://trigger.dev/docs/frontend/overview#authentication for more information.", + }, + { status: 401 } + ); + } + case "ERR_JWT_CLAIM_INVALID": { + throw json( + { + error: `Public Access Token is invalid: ${result.error}. See https://trigger.dev/docs/frontend/overview#authentication for more information.`, + }, + { status: 401 } + ); + } + default: { + throw json( + { + error: + "Public Access Token is invalid. 
See https://trigger.dev/docs/frontend/overview#authentication for more information.", + }, + { status: 401 } + ); + } + } } return { environment, - claims, + claims: result.payload, }; } diff --git a/apps/webapp/app/services/routeBuilders/apiBuilder.server.ts b/apps/webapp/app/services/routeBuilders/apiBuilder.server.ts index 169477a377..5e9649f6b5 100644 --- a/apps/webapp/app/services/routeBuilders/apiBuilder.server.ts +++ b/apps/webapp/app/services/routeBuilders/apiBuilder.server.ts @@ -74,87 +74,102 @@ export function createLoaderApiRoute< return apiCors(request, json({})); } - const authenticationResult = await authenticateApiRequest(request, { allowJWT }); - - if (!authenticationResult) { - return wrapResponse( - request, - json({ error: "Invalid or Missing API key" }, { status: 401 }), - corsStrategy !== "none" - ); - } + try { + const authenticationResult = await authenticateApiRequest(request, { allowJWT }); - let parsedParams: any = undefined; - if (paramsSchema) { - const parsed = paramsSchema.safeParse(params); - if (!parsed.success) { + if (!authenticationResult) { return wrapResponse( request, - json( - { error: "Params Error", details: fromZodError(parsed.error).details }, - { status: 400 } - ), + json({ error: "Invalid or Missing API key" }, { status: 401 }), corsStrategy !== "none" ); } - parsedParams = parsed.data; - } - let parsedSearchParams: any = undefined; - if (searchParamsSchema) { - const searchParams = Object.fromEntries(new URL(request.url).searchParams); - const parsed = searchParamsSchema.safeParse(searchParams); - if (!parsed.success) { - return wrapResponse( - request, - json( - { error: "Query Error", details: fromZodError(parsed.error).details }, - { status: 400 } - ), - corsStrategy !== "none" - ); + let parsedParams: any = undefined; + if (paramsSchema) { + const parsed = paramsSchema.safeParse(params); + if (!parsed.success) { + return wrapResponse( + request, + json( + { error: "Params Error", details: fromZodError(parsed.error).details }, + { status: 400 } + ), + corsStrategy !== "none" + ); + } + parsedParams = parsed.data; } - parsedSearchParams = parsed.data; - } - let parsedHeaders: any = undefined; - if (headersSchema) { - const rawHeaders = Object.fromEntries(request.headers); - const headers = headersSchema.safeParse(rawHeaders); - if (!headers.success) { - return wrapResponse( - request, - json( - { error: "Headers Error", details: fromZodError(headers.error).details }, - { status: 400 } - ), - corsStrategy !== "none" - ); + let parsedSearchParams: any = undefined; + if (searchParamsSchema) { + const searchParams = Object.fromEntries(new URL(request.url).searchParams); + const parsed = searchParamsSchema.safeParse(searchParams); + if (!parsed.success) { + return wrapResponse( + request, + json( + { error: "Query Error", details: fromZodError(parsed.error).details }, + { status: 400 } + ), + corsStrategy !== "none" + ); + } + parsedSearchParams = parsed.data; } - parsedHeaders = headers.data; - } - if (authorization) { - const { action, resource, superScopes } = authorization; - const $resource = resource(parsedParams, parsedSearchParams, parsedHeaders); - - logger.debug("Checking authorization", { - action, - resource: $resource, - superScopes, - scopes: authenticationResult.scopes, - }); + let parsedHeaders: any = undefined; + if (headersSchema) { + const rawHeaders = Object.fromEntries(request.headers); + const headers = headersSchema.safeParse(rawHeaders); + if (!headers.success) { + return wrapResponse( + request, + json( + { error: 
"Headers Error", details: fromZodError(headers.error).details }, + { status: 400 } + ), + corsStrategy !== "none" + ); + } + parsedHeaders = headers.data; + } - if (!checkAuthorization(authenticationResult, action, $resource, superScopes)) { - return wrapResponse( - request, - json({ error: "Unauthorized" }, { status: 403 }), - corsStrategy !== "none" + if (authorization) { + const { action, resource, superScopes } = authorization; + const $resource = resource(parsedParams, parsedSearchParams, parsedHeaders); + + logger.debug("Checking authorization", { + action, + resource: $resource, + superScopes, + scopes: authenticationResult.scopes, + }); + + const authorizationResult = checkAuthorization( + authenticationResult, + action, + $resource, + superScopes ); + + if (!authorizationResult.authorized) { + return wrapResponse( + request, + json( + { + error: `Unauthorized: ${authorizationResult.reason}`, + code: "unauthorized", + param: "access_token", + type: "authorization", + }, + { status: 403 } + ), + corsStrategy !== "none" + ); + } } - } - try { const result = await handler({ params: parsedParams, searchParams: parsedSearchParams, @@ -164,7 +179,6 @@ export function createLoaderApiRoute< }); return wrapResponse(request, result, corsStrategy !== "none"); } catch (error) { - console.error("Error in API route:", error); if (error instanceof Response) { return wrapResponse(request, error, corsStrategy !== "none"); } @@ -222,67 +236,67 @@ export function createLoaderPATApiRoute< return apiCors(request, json({})); } - const authenticationResult = await authenticateApiRequestWithPersonalAccessToken(request); - - if (!authenticationResult) { - return wrapResponse( - request, - json({ error: "Invalid or Missing API key" }, { status: 401 }), - corsStrategy !== "none" - ); - } + try { + const authenticationResult = await authenticateApiRequestWithPersonalAccessToken(request); - let parsedParams: any = undefined; - if (paramsSchema) { - const parsed = paramsSchema.safeParse(params); - if (!parsed.success) { + if (!authenticationResult) { return wrapResponse( request, - json( - { error: "Params Error", details: fromZodError(parsed.error).details }, - { status: 400 } - ), + json({ error: "Invalid or Missing API key" }, { status: 401 }), corsStrategy !== "none" ); } - parsedParams = parsed.data; - } - let parsedSearchParams: any = undefined; - if (searchParamsSchema) { - const searchParams = Object.fromEntries(new URL(request.url).searchParams); - const parsed = searchParamsSchema.safeParse(searchParams); - if (!parsed.success) { - return wrapResponse( - request, - json( - { error: "Query Error", details: fromZodError(parsed.error).details }, - { status: 400 } - ), - corsStrategy !== "none" - ); + let parsedParams: any = undefined; + if (paramsSchema) { + const parsed = paramsSchema.safeParse(params); + if (!parsed.success) { + return wrapResponse( + request, + json( + { error: "Params Error", details: fromZodError(parsed.error).details }, + { status: 400 } + ), + corsStrategy !== "none" + ); + } + parsedParams = parsed.data; } - parsedSearchParams = parsed.data; - } - let parsedHeaders: any = undefined; - if (headersSchema) { - const rawHeaders = Object.fromEntries(request.headers); - const headers = headersSchema.safeParse(rawHeaders); - if (!headers.success) { - return wrapResponse( - request, - json( - { error: "Headers Error", details: fromZodError(headers.error).details }, - { status: 400 } - ), - corsStrategy !== "none" - ); + let parsedSearchParams: any = undefined; + if (searchParamsSchema) { 
+ const searchParams = Object.fromEntries(new URL(request.url).searchParams); + const parsed = searchParamsSchema.safeParse(searchParams); + if (!parsed.success) { + return wrapResponse( + request, + json( + { error: "Query Error", details: fromZodError(parsed.error).details }, + { status: 400 } + ), + corsStrategy !== "none" + ); + } + parsedSearchParams = parsed.data; + } + + let parsedHeaders: any = undefined; + if (headersSchema) { + const rawHeaders = Object.fromEntries(request.headers); + const headers = headersSchema.safeParse(rawHeaders); + if (!headers.success) { + return wrapResponse( + request, + json( + { error: "Headers Error", details: fromZodError(headers.error).details }, + { status: 400 } + ), + corsStrategy !== "none" + ); + } + parsedHeaders = headers.data; } - parsedHeaders = headers.data; - } - try { const result = await handler({ params: parsedParams, searchParams: parsedSearchParams, @@ -370,127 +384,127 @@ export function createActionApiRoute< } async function action({ request, params }: ActionFunctionArgs) { - const authenticationResult = await authenticateApiRequest(request, { allowJWT }); - - if (!authenticationResult) { - return wrapResponse( - request, - json({ error: "Invalid or Missing API key" }, { status: 401 }), - corsStrategy !== "none" - ); - } - - if (maxContentLength) { - const contentLength = request.headers.get("content-length"); - - if (!contentLength || parseInt(contentLength) > maxContentLength) { - return json({ error: "Request body too large" }, { status: 413 }); - } - } + try { + const authenticationResult = await authenticateApiRequest(request, { allowJWT }); - let parsedParams: any = undefined; - if (paramsSchema) { - const parsed = paramsSchema.safeParse(params); - if (!parsed.success) { + if (!authenticationResult) { return wrapResponse( request, - json( - { error: "Params Error", details: fromZodError(parsed.error).details }, - { status: 400 } - ), + json({ error: "Invalid or Missing API key" }, { status: 401 }), corsStrategy !== "none" ); } - parsedParams = parsed.data; - } - let parsedSearchParams: any = undefined; - if (searchParamsSchema) { - const searchParams = Object.fromEntries(new URL(request.url).searchParams); - const parsed = searchParamsSchema.safeParse(searchParams); - if (!parsed.success) { - return wrapResponse( - request, - json( - { error: "Query Error", details: fromZodError(parsed.error).details }, - { status: 400 } - ), - corsStrategy !== "none" - ); - } - parsedSearchParams = parsed.data; - } + if (maxContentLength) { + const contentLength = request.headers.get("content-length"); - let parsedHeaders: any = undefined; - if (headersSchema) { - const rawHeaders = Object.fromEntries(request.headers); - const headers = headersSchema.safeParse(rawHeaders); - if (!headers.success) { - return wrapResponse( - request, - json( - { error: "Headers Error", details: fromZodError(headers.error).details }, - { status: 400 } - ), - corsStrategy !== "none" - ); + if (!contentLength || parseInt(contentLength) > maxContentLength) { + return json({ error: "Request body too large" }, { status: 413 }); + } } - parsedHeaders = headers.data; - } - let parsedBody: any = undefined; - if (bodySchema) { - const rawBody = await request.text(); - if (rawBody.length === 0) { - return wrapResponse( - request, - json({ error: "Request body is empty" }, { status: 400 }), - corsStrategy !== "none" - ); + let parsedParams: any = undefined; + if (paramsSchema) { + const parsed = paramsSchema.safeParse(params); + if (!parsed.success) { + return 
wrapResponse( + request, + json( + { error: "Params Error", details: fromZodError(parsed.error).details }, + { status: 400 } + ), + corsStrategy !== "none" + ); + } + parsedParams = parsed.data; } - const rawParsedJson = safeJsonParse(rawBody); - - if (!rawParsedJson) { - return wrapResponse( - request, - json({ error: "Invalid JSON" }, { status: 400 }), - corsStrategy !== "none" - ); + let parsedSearchParams: any = undefined; + if (searchParamsSchema) { + const searchParams = Object.fromEntries(new URL(request.url).searchParams); + const parsed = searchParamsSchema.safeParse(searchParams); + if (!parsed.success) { + return wrapResponse( + request, + json( + { error: "Query Error", details: fromZodError(parsed.error).details }, + { status: 400 } + ), + corsStrategy !== "none" + ); + } + parsedSearchParams = parsed.data; } - const body = bodySchema.safeParse(rawParsedJson); - if (!body.success) { - return wrapResponse( - request, - json({ error: fromZodError(body.error).toString() }, { status: 400 }), - corsStrategy !== "none" - ); + let parsedHeaders: any = undefined; + if (headersSchema) { + const rawHeaders = Object.fromEntries(request.headers); + const headers = headersSchema.safeParse(rawHeaders); + if (!headers.success) { + return wrapResponse( + request, + json( + { error: "Headers Error", details: fromZodError(headers.error).details }, + { status: 400 } + ), + corsStrategy !== "none" + ); + } + parsedHeaders = headers.data; } - parsedBody = body.data; - } - - if (authorization) { - const { action, resource, superScopes } = authorization; - const $resource = resource(parsedParams, parsedSearchParams, parsedHeaders); - logger.debug("Checking authorization", { - action, - resource: $resource, - superScopes, - scopes: authenticationResult.scopes, - }); + let parsedBody: any = undefined; + if (bodySchema) { + const rawBody = await request.text(); + if (rawBody.length === 0) { + return wrapResponse( + request, + json({ error: "Request body is empty" }, { status: 400 }), + corsStrategy !== "none" + ); + } + + const rawParsedJson = safeJsonParse(rawBody); + + if (!rawParsedJson) { + return wrapResponse( + request, + json({ error: "Invalid JSON" }, { status: 400 }), + corsStrategy !== "none" + ); + } + + const body = bodySchema.safeParse(rawParsedJson); + if (!body.success) { + return wrapResponse( + request, + json({ error: fromZodError(body.error).toString() }, { status: 400 }), + corsStrategy !== "none" + ); + } + parsedBody = body.data; + } - if (!checkAuthorization(authenticationResult, action, $resource, superScopes)) { - return wrapResponse( - request, - json({ error: "Unauthorized" }, { status: 403 }), - corsStrategy !== "none" - ); + if (authorization) { + const { action, resource, superScopes } = authorization; + const $resource = resource(parsedParams, parsedSearchParams, parsedHeaders); + + logger.debug("Checking authorization", { + action, + resource: $resource, + superScopes, + scopes: authenticationResult.scopes, + }); + + if (!checkAuthorization(authenticationResult, action, $resource, superScopes)) { + return wrapResponse( + request, + json({ error: "Unauthorized" }, { status: 403 }), + corsStrategy !== "none" + ); + } } - } - try { const result = await handler({ params: parsedParams, searchParams: parsedSearchParams, diff --git a/apps/webapp/test/authorization.test.ts b/apps/webapp/test/authorization.test.ts index 78950222ca..aafe081c00 100644 --- a/apps/webapp/test/authorization.test.ts +++ b/apps/webapp/test/authorization.test.ts @@ -12,110 +12,169 @@ 
describe("checkAuthorization", () => { const publicJwtEntityNoPermissions: AuthorizationEntity = { type: "PUBLIC_JWT" }; describe("PRIVATE entity", () => { - it("should always return true regardless of action or resource", () => { - expect(checkAuthorization(privateEntity, "read", { runs: "run_1234" })).toBe(true); - expect(checkAuthorization(privateEntity, "read", { tasks: ["task_1", "task_2"] })).toBe(true); - expect(checkAuthorization(privateEntity, "read", { tags: "nonexistent_tag" })).toBe(true); + it("should always return authorized regardless of action or resource", () => { + const result1 = checkAuthorization(privateEntity, "read", { runs: "run_1234" }); + expect(result1.authorized).toBe(true); + expect(result1).not.toHaveProperty("reason"); + + const result2 = checkAuthorization(privateEntity, "read", { tasks: ["task_1", "task_2"] }); + expect(result2.authorized).toBe(true); + expect(result2).not.toHaveProperty("reason"); + + const result3 = checkAuthorization(privateEntity, "read", { tags: "nonexistent_tag" }); + expect(result3.authorized).toBe(true); + expect(result3).not.toHaveProperty("reason"); }); }); describe("PUBLIC entity", () => { - it("should always return false regardless of action or resource", () => { - expect(checkAuthorization(publicEntity, "read", { runs: "run_1234" })).toBe(false); - expect(checkAuthorization(publicEntity, "read", { tasks: ["task_1", "task_2"] })).toBe(false); - expect(checkAuthorization(publicEntity, "read", { tags: "tag_5678" })).toBe(false); + it("should always return unauthorized with reason regardless of action or resource", () => { + const result1 = checkAuthorization(publicEntity, "read", { runs: "run_1234" }); + expect(result1.authorized).toBe(false); + if (!result1.authorized) { + expect(result1.reason).toBe("PUBLIC type is deprecated and has no access"); + } + + const result2 = checkAuthorization(publicEntity, "read", { tasks: ["task_1", "task_2"] }); + expect(result2.authorized).toBe(false); + if (!result2.authorized) { + expect(result2.reason).toBe("PUBLIC type is deprecated and has no access"); + } + + const result3 = checkAuthorization(publicEntity, "read", { tags: "tag_5678" }); + expect(result3.authorized).toBe(false); + if (!result3.authorized) { + expect(result3.reason).toBe("PUBLIC type is deprecated and has no access"); + } }); }); describe("PUBLIC_JWT entity with scope", () => { - it("should return true for specific resource scope", () => { - expect(checkAuthorization(publicJwtEntityWithPermissions, "read", { runs: "run_1234" })).toBe( - true - ); + it("should return authorized for specific resource scope", () => { + const result = checkAuthorization(publicJwtEntityWithPermissions, "read", { + runs: "run_1234", + }); + expect(result.authorized).toBe(true); + expect(result).not.toHaveProperty("reason"); }); - it("should return false for unauthorized specific resources", () => { - expect(checkAuthorization(publicJwtEntityWithPermissions, "read", { runs: "run_5678" })).toBe( - false - ); + it("should return unauthorized with reason for unauthorized specific resources", () => { + const result = checkAuthorization(publicJwtEntityWithPermissions, "read", { + runs: "run_5678", + }); + expect(result.authorized).toBe(false); + if (!result.authorized) { + expect(result.reason).toBe("Missing required permission for read on runs"); + } }); - it("should return true for general resource type scope", () => { - expect( - checkAuthorization(publicJwtEntityWithPermissions, "read", { tasks: "task_1234" }) - ).toBe(true); - expect( - 
checkAuthorization(publicJwtEntityWithPermissions, "read", { - tasks: ["task_5678", "task_9012"], - }) - ).toBe(true); + it("should return authorized for general resource type scope", () => { + const result1 = checkAuthorization(publicJwtEntityWithPermissions, "read", { + tasks: "task_1234", + }); + expect(result1.authorized).toBe(true); + expect(result1).not.toHaveProperty("reason"); + + const result2 = checkAuthorization(publicJwtEntityWithPermissions, "read", { + tasks: ["task_5678", "task_9012"], + }); + expect(result2.authorized).toBe(true); + expect(result2).not.toHaveProperty("reason"); }); - it("should return true if any resource in an array is authorized", () => { - expect( - checkAuthorization(publicJwtEntityWithPermissions, "read", { - tags: ["tag_1234", "tag_5678"], - }) - ).toBe(true); + it("should return authorized if any resource in an array is authorized", () => { + const result = checkAuthorization(publicJwtEntityWithPermissions, "read", { + tags: ["tag_1234", "tag_5678"], + }); + expect(result.authorized).toBe(true); + expect(result).not.toHaveProperty("reason"); }); - it("should return true for nonexistent resource types", () => { - expect( + it("should return authorized for nonexistent resource types", () => { + const result = checkAuthorization(publicJwtEntityWithPermissions, "read", { // @ts-expect-error - checkAuthorization(publicJwtEntityWithPermissions, "read", { nonexistent: "resource" }) - ).toBe(true); + nonexistent: "resource", + }); + expect(result.authorized).toBe(true); + expect(result).not.toHaveProperty("reason"); }); }); describe("PUBLIC_JWT entity without scope", () => { - it("should always return false regardless of action or resource", () => { - expect(checkAuthorization(publicJwtEntityNoPermissions, "read", { runs: "run_1234" })).toBe( - false - ); - expect( - checkAuthorization(publicJwtEntityNoPermissions, "read", { tasks: ["task_1", "task_2"] }) - ).toBe(false); - expect(checkAuthorization(publicJwtEntityNoPermissions, "read", { tags: "tag_5678" })).toBe( - false - ); + it("should always return unauthorized with reason regardless of action or resource", () => { + const result1 = checkAuthorization(publicJwtEntityNoPermissions, "read", { + runs: "run_1234", + }); + expect(result1.authorized).toBe(false); + if (!result1.authorized) { + expect(result1.reason).toBe("Entity has no permissions"); + } + + const result2 = checkAuthorization(publicJwtEntityNoPermissions, "read", { + tasks: ["task_1", "task_2"], + }); + expect(result2.authorized).toBe(false); + if (!result2.authorized) { + expect(result2.reason).toBe("Entity has no permissions"); + } + + const result3 = checkAuthorization(publicJwtEntityNoPermissions, "read", { + tags: "tag_5678", + }); + expect(result3.authorized).toBe(false); + if (!result3.authorized) { + expect(result3.reason).toBe("Entity has no permissions"); + } }); }); describe("Edge cases", () => { it("should handle empty resource objects", () => { - expect(checkAuthorization(publicJwtEntityWithPermissions, "read", {})).toBe(false); + const result = checkAuthorization(publicJwtEntityWithPermissions, "read", {}); + expect(result.authorized).toBe(false); + if (!result.authorized) { + expect(result.reason).toBe("Resource object is empty"); + } }); it("should handle undefined scope", () => { const entityUndefinedPermissions: AuthorizationEntity = { type: "PUBLIC_JWT" }; - expect(checkAuthorization(entityUndefinedPermissions, "read", { runs: "run_1234" })).toBe( - false - ); + const result = checkAuthorization(entityUndefinedPermissions, 
"read", { runs: "run_1234" }); + expect(result.authorized).toBe(false); + if (!result.authorized) { + expect(result.reason).toBe("Entity has no permissions"); + } }); it("should handle empty scope array", () => { const entityEmptyPermissions: AuthorizationEntity = { type: "PUBLIC_JWT", scopes: [] }; - expect(checkAuthorization(entityEmptyPermissions, "read", { runs: "run_1234" })).toBe(false); + const result = checkAuthorization(entityEmptyPermissions, "read", { runs: "run_1234" }); + expect(result.authorized).toBe(false); + if (!result.authorized) { + expect(result.reason).toBe("Entity has no permissions"); + } }); - it("should return false if any resource is not authorized", () => { - expect( - checkAuthorization(publicJwtEntityWithPermissions, "read", { - runs: "run_1234", // This is authorized - tasks: "task_5678", // This is authorized (general permission) - tags: "tag_3456", // This is not authorized - }) - ).toBe(false); + it("should return unauthorized if any resource is not authorized", () => { + const result = checkAuthorization(publicJwtEntityWithPermissions, "read", { + runs: "run_1234", // This is authorized + tasks: "task_5678", // This is authorized (general permission) + tags: "tag_3456", // This is not authorized + }); + expect(result.authorized).toBe(false); + if (!result.authorized) { + expect(result.reason).toBe("Missing required permission for read on tags"); + } }); - it("should return true only if all resources are authorized", () => { - expect( - checkAuthorization(publicJwtEntityWithPermissions, "read", { - runs: "run_1234", // This is authorized - tasks: "task_5678", // This is authorized (general permission) - tags: "tag_5678", // This is authorized - }) - ).toBe(true); + it("should return authorized only if all resources are authorized", () => { + const result = checkAuthorization(publicJwtEntityWithPermissions, "read", { + runs: "run_1234", // This is authorized + tasks: "task_5678", // This is authorized (general permission) + tags: "tag_5678", // This is authorized + }); + expect(result.authorized).toBe(true); + expect(result).not.toHaveProperty("reason"); }); }); @@ -131,51 +190,62 @@ describe("checkAuthorization", () => { }; it("should grant access with any of the super scope", () => { - expect( - checkAuthorization(entityWithSuperPermissions, "read", { tasks: "task_1234" }, [ - "read:all", - "admin", - ]) - ).toBe(true); - expect( - checkAuthorization(entityWithSuperPermissions, "read", { tags: ["tag_1", "tag_2"] }, [ - "write:all", - "admin", - ]) - ).toBe(true); + const result1 = checkAuthorization( + entityWithSuperPermissions, + "read", + { tasks: "task_1234" }, + ["read:all", "admin"] + ); + expect(result1.authorized).toBe(true); + expect(result1).not.toHaveProperty("reason"); + + const result2 = checkAuthorization( + entityWithSuperPermissions, + "read", + { tags: ["tag_1", "tag_2"] }, + ["write:all", "admin"] + ); + expect(result2.authorized).toBe(true); + expect(result2).not.toHaveProperty("reason"); }); it("should grant access with one matching super permission", () => { - expect( - checkAuthorization(entityWithOneSuperPermission, "read", { runs: "run_5678" }, [ - "read:all", - "admin", - ]) - ).toBe(true); + const result = checkAuthorization( + entityWithOneSuperPermission, + "read", + { runs: "run_5678" }, + ["read:all", "admin"] + ); + expect(result.authorized).toBe(true); + expect(result).not.toHaveProperty("reason"); }); it("should not grant access when no super scope match", () => { - expect( - 
checkAuthorization(entityWithOneSuperPermission, "read", { tasks: "task_1234" }, [ - "write:all", - "admin", - ]) - ).toBe(false); + const result = checkAuthorization( + entityWithOneSuperPermission, + "read", + { tasks: "task_1234" }, + ["write:all", "admin"] + ); + expect(result.authorized).toBe(false); + if (!result.authorized) { + expect(result.reason).toBe("Missing required permission for read on tasks"); + } }); it("should grant access to multiple resources with super scope", () => { - expect( - checkAuthorization( - entityWithSuperPermissions, - "read", - { - tasks: "task_1234", - tags: ["tag_1", "tag_2"], - runs: "run_5678", - }, - ["read:all"] - ) - ).toBe(true); + const result = checkAuthorization( + entityWithSuperPermissions, + "read", + { + tasks: "task_1234", + tags: ["tag_1", "tag_2"], + runs: "run_5678", + }, + ["read:all"] + ); + expect(result.authorized).toBe(true); + expect(result).not.toHaveProperty("reason"); }); it("should fall back to specific scope when super scope are not provided", () => { @@ -183,12 +253,19 @@ describe("checkAuthorization", () => { type: "PUBLIC_JWT", scopes: ["read:tasks", "read:tags"], }; - expect( - checkAuthorization(entityWithSpecificPermissions, "read", { tasks: "task_1234" }) - ).toBe(true); - expect(checkAuthorization(entityWithSpecificPermissions, "read", { runs: "run_5678" })).toBe( - false - ); + const result1 = checkAuthorization(entityWithSpecificPermissions, "read", { + tasks: "task_1234", + }); + expect(result1.authorized).toBe(true); + expect(result1).not.toHaveProperty("reason"); + + const result2 = checkAuthorization(entityWithSpecificPermissions, "read", { + runs: "run_5678", + }); + expect(result2.authorized).toBe(false); + if (!result2.authorized) { + expect(result2.reason).toBe("Missing required permission for read on runs"); + } }); }); @@ -199,21 +276,27 @@ describe("checkAuthorization", () => { }; it("should still grant access based on specific scope", () => { - expect( - checkAuthorization(entityWithoutSuperPermissions, "read", { tasks: "task_1234" }, [ - "read:all", - "admin", - ]) - ).toBe(true); + const result = checkAuthorization( + entityWithoutSuperPermissions, + "read", + { tasks: "task_1234" }, + ["read:all", "admin"] + ); + expect(result.authorized).toBe(true); + expect(result).not.toHaveProperty("reason"); }); it("should deny access to resources not in scope", () => { - expect( - checkAuthorization(entityWithoutSuperPermissions, "read", { runs: "run_5678" }, [ - "read:all", - "admin", - ]) - ).toBe(false); + const result = checkAuthorization( + entityWithoutSuperPermissions, + "read", + { runs: "run_5678" }, + ["read:all", "admin"] + ); + expect(result.authorized).toBe(false); + if (!result.authorized) { + expect(result.reason).toBe("Missing required permission for read on runs"); + } }); }); }); diff --git a/packages/core/src/v3/apiClient/stream.ts b/packages/core/src/v3/apiClient/stream.ts index 82aeec3f1f..97f9ee816e 100644 --- a/packages/core/src/v3/apiClient/stream.ts +++ b/packages/core/src/v3/apiClient/stream.ts @@ -1,4 +1,5 @@ import { z } from "zod"; +import { ApiError } from "./errors.js"; export type ZodShapeStreamOptions = { headers?: Record; @@ -12,7 +13,7 @@ export async function zodShapeStream( callback: (shape: z.output) => void | Promise, options?: ZodShapeStreamOptions ) { - const { ShapeStream, Shape } = await import("@electric-sql/client"); + const { ShapeStream, Shape, FetchError } = await import("@electric-sql/client"); const stream = new ShapeStream>({ url, @@ -24,19 +25,27 @@ export 
async function zodShapeStream( signal: options?.signal, }); - const shape = new Shape(stream); + try { + const shape = new Shape(stream); - const initialRows = await shape.rows; + const initialRows = await shape.rows; - for (const shapeRow of initialRows) { - await callback(schema.parse(shapeRow)); - } - - return shape.subscribe(async (newShape) => { - for (const shapeRow of newShape.rows) { + for (const shapeRow of initialRows) { await callback(schema.parse(shapeRow)); } - }); + + return shape.subscribe(async (newShape) => { + for (const shapeRow of newShape.rows) { + await callback(schema.parse(shapeRow)); + } + }); + } catch (error) { + if (error instanceof FetchError) { + throw ApiError.generate(error.status, error.json, error.message, error.headers); + } else { + throw error; + } + } } export type AsyncIterableStream = AsyncIterable & ReadableStream; diff --git a/packages/core/src/v3/jwt.ts b/packages/core/src/v3/jwt.ts index 4426d2cfc2..d71f1e7f1e 100644 --- a/packages/core/src/v3/jwt.ts +++ b/packages/core/src/v3/jwt.ts @@ -1,3 +1,5 @@ +import type { JWTPayload } from "jose"; + export type GenerateJWTOptions = { secretKey: string; payload: Record; @@ -22,8 +24,19 @@ export async function generateJWT(options: GenerateJWTOptions): Promise .sign(secret); } -export async function validateJWT(token: string, apiKey: string) { - const { jwtVerify } = await import("jose"); +export type ValidationResult = + | { + ok: true; + payload: JWTPayload; + } + | { + ok: false; + error: string; + code: string; + }; + +export async function validateJWT(token: string, apiKey: string): Promise { + const { jwtVerify, errors } = await import("jose"); const secret = new TextEncoder().encode(apiKey); @@ -33,8 +46,20 @@ export async function validateJWT(token: string, apiKey: string) { audience: JWT_AUDIENCE, }); - return payload; - } catch (e) { - return; + return { ok: true, payload }; + } catch (error) { + if (error instanceof errors.JOSEError) { + return { + ok: false, + error: error.message, + code: error.code, + }; + } else { + return { + ok: false, + error: error instanceof Error ? 
error.message : "Unknown error", + code: "ERR_UNKNOWN", + }; + } } } diff --git a/references/nextjs-realtime/src/app/runs/[id]/ClientRunDetails.tsx b/references/nextjs-realtime/src/app/runs/[id]/ClientRunDetails.tsx index 2ee2841cab..280048578f 100644 --- a/references/nextjs-realtime/src/app/runs/[id]/ClientRunDetails.tsx +++ b/references/nextjs-realtime/src/app/runs/[id]/ClientRunDetails.tsx @@ -5,8 +5,16 @@ import { Card, CardContent } from "@/components/ui/card"; import { TriggerAuthContext, useRealtimeRun } from "@trigger.dev/react-hooks"; import type { exampleTask } from "@/trigger/example"; -function RunDetailsWrapper({ runId }: { runId: string }) { - const { run, error } = useRealtimeRun(runId); +function RunDetailsWrapper({ + runId, + publicAccessToken, +}: { + runId: string; + publicAccessToken: string; +}) { + const { run, error } = useRealtimeRun(runId, { + accessToken: publicAccessToken, + }); if (error) { return ( @@ -41,10 +49,8 @@ function RunDetailsWrapper({ runId }: { runId: string }) { export default function ClientRunDetails({ runId, jwt }: { runId: string; jwt: string }) { return ( - - + + ); } From 50a2f733a9c0ead4578e13c02afaa75409af46fd Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Tue, 19 Nov 2024 09:47:24 +0000 Subject: [PATCH 26/31] Fixed authorization tests --- apps/webapp/test/authorization.test.ts | 40 +++++++++++++++++++------- 1 file changed, 30 insertions(+), 10 deletions(-) diff --git a/apps/webapp/test/authorization.test.ts b/apps/webapp/test/authorization.test.ts index aafe081c00..51375b5f03 100644 --- a/apps/webapp/test/authorization.test.ts +++ b/apps/webapp/test/authorization.test.ts @@ -64,7 +64,9 @@ describe("checkAuthorization", () => { }); expect(result.authorized).toBe(false); if (!result.authorized) { - expect(result.reason).toBe("Missing required permission for read on runs"); + expect(result.reason).toBe( + "Public Access Token is missing required permissions. Permissions required for 'read:runs:run_5678' but token has the following permissions: 'read:runs:run_1234', 'read:tasks', 'read:tags:tag_5678'. See https://trigger.dev/docs/frontend/overview#authentication for more information." + ); } }); @@ -107,7 +109,9 @@ describe("checkAuthorization", () => { }); expect(result1.authorized).toBe(false); if (!result1.authorized) { - expect(result1.reason).toBe("Entity has no permissions"); + expect(result1.reason).toBe( + "Public Access Token has no permissions. See https://trigger.dev/docs/frontend/overview#authentication for more information." + ); } const result2 = checkAuthorization(publicJwtEntityNoPermissions, "read", { @@ -115,7 +119,9 @@ describe("checkAuthorization", () => { }); expect(result2.authorized).toBe(false); if (!result2.authorized) { - expect(result2.reason).toBe("Entity has no permissions"); + expect(result2.reason).toBe( + "Public Access Token has no permissions. See https://trigger.dev/docs/frontend/overview#authentication for more information." + ); } const result3 = checkAuthorization(publicJwtEntityNoPermissions, "read", { @@ -123,7 +129,9 @@ describe("checkAuthorization", () => { }); expect(result3.authorized).toBe(false); if (!result3.authorized) { - expect(result3.reason).toBe("Entity has no permissions"); + expect(result3.reason).toBe( + "Public Access Token has no permissions. See https://trigger.dev/docs/frontend/overview#authentication for more information." 
+ ); } }); }); @@ -142,7 +150,9 @@ describe("checkAuthorization", () => { const result = checkAuthorization(entityUndefinedPermissions, "read", { runs: "run_1234" }); expect(result.authorized).toBe(false); if (!result.authorized) { - expect(result.reason).toBe("Entity has no permissions"); + expect(result.reason).toBe( + "Public Access Token has no permissions. See https://trigger.dev/docs/frontend/overview#authentication for more information." + ); } }); @@ -151,7 +161,9 @@ describe("checkAuthorization", () => { const result = checkAuthorization(entityEmptyPermissions, "read", { runs: "run_1234" }); expect(result.authorized).toBe(false); if (!result.authorized) { - expect(result.reason).toBe("Entity has no permissions"); + expect(result.reason).toBe( + "Public Access Token has no permissions. See https://trigger.dev/docs/frontend/overview#authentication for more information." + ); } }); @@ -163,7 +175,9 @@ describe("checkAuthorization", () => { }); expect(result.authorized).toBe(false); if (!result.authorized) { - expect(result.reason).toBe("Missing required permission for read on tags"); + expect(result.reason).toBe( + "Public Access Token is missing required permissions. Permissions required for 'read:tags:tag_3456' but token has the following permissions: 'read:runs:run_1234', 'read:tasks', 'read:tags:tag_5678'. See https://trigger.dev/docs/frontend/overview#authentication for more information." + ); } }); @@ -229,7 +243,9 @@ describe("checkAuthorization", () => { ); expect(result.authorized).toBe(false); if (!result.authorized) { - expect(result.reason).toBe("Missing required permission for read on tasks"); + expect(result.reason).toBe( + "Public Access Token is missing required permissions. Permissions required for 'read:tasks:task_1234' but token has the following permissions: 'read:all'. See https://trigger.dev/docs/frontend/overview#authentication for more information." + ); } }); @@ -264,7 +280,9 @@ describe("checkAuthorization", () => { }); expect(result2.authorized).toBe(false); if (!result2.authorized) { - expect(result2.reason).toBe("Missing required permission for read on runs"); + expect(result2.reason).toBe( + "Public Access Token is missing required permissions. Permissions required for 'read:runs:run_5678' but token has the following permissions: 'read:tasks', 'read:tags'. See https://trigger.dev/docs/frontend/overview#authentication for more information." + ); } }); }); @@ -295,7 +313,9 @@ describe("checkAuthorization", () => { ); expect(result.authorized).toBe(false); if (!result.authorized) { - expect(result.reason).toBe("Missing required permission for read on runs"); + expect(result.reason).toBe( + "Public Access Token is missing required permissions. Permissions required for 'read:runs:run_5678' but token has the following permissions: 'read:tasks'. See https://trigger.dev/docs/frontend/overview#authentication for more information." 
+ ); } }); }); From feb6486596c83884d318e43578a2011260df122d Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Tue, 19 Nov 2024 10:13:56 +0000 Subject: [PATCH 27/31] Remove unnecessary log --- apps/webapp/app/services/routeBuilders/apiBuilder.server.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/apps/webapp/app/services/routeBuilders/apiBuilder.server.ts b/apps/webapp/app/services/routeBuilders/apiBuilder.server.ts index 5e9649f6b5..c419d38cb3 100644 --- a/apps/webapp/app/services/routeBuilders/apiBuilder.server.ts +++ b/apps/webapp/app/services/routeBuilders/apiBuilder.server.ts @@ -515,7 +515,6 @@ export function createActionApiRoute< }); return wrapResponse(request, result, corsStrategy !== "none"); } catch (error) { - console.error("Error in API route:", error); if (error instanceof Response) { return wrapResponse(request, error, corsStrategy !== "none"); } From 62a75986153f0abee2966386f28e37573d80f6e8 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Tue, 19 Nov 2024 11:13:19 +0000 Subject: [PATCH 28/31] Add metadata.stream limits and improve the metadata streams structure --- .../app/presenters/v3/SpanPresenter.server.ts | 2 +- packages/core/src/v3/apiClient/runStream.ts | 44 +++++----- packages/core/src/v3/runMetadata/index.ts | 6 +- packages/core/src/v3/runMetadata/manager.ts | 57 +++++++++++- .../core/src/v3/runMetadata/noopManager.ts | 3 + packages/core/src/v3/runMetadata/types.ts | 1 + packages/core/src/v3/utils/ioSerialization.ts | 69 +++++++++------ packages/core/test/runStream.test.ts | 19 ++-- .../core/test/standardMetadataManager.test.ts | 88 +++++++++++++++++++ packages/trigger-sdk/src/v3/metadata.ts | 16 ++++ 10 files changed, 241 insertions(+), 64 deletions(-) diff --git a/apps/webapp/app/presenters/v3/SpanPresenter.server.ts b/apps/webapp/app/presenters/v3/SpanPresenter.server.ts index 0e11e15d96..348ed704f2 100644 --- a/apps/webapp/app/presenters/v3/SpanPresenter.server.ts +++ b/apps/webapp/app/presenters/v3/SpanPresenter.server.ts @@ -210,7 +210,7 @@ export class SpanPresenter extends BasePresenter { const span = await eventRepository.getSpan(spanId, run.traceId); const metadata = run.metadata - ? await prettyPrintPacket(run.metadata, run.metadataType) + ? 
await prettyPrintPacket(run.metadata, run.metadataType, { filteredKeys: ["$$streams"] }) : undefined; const context = { diff --git a/packages/core/src/v3/apiClient/runStream.ts b/packages/core/src/v3/apiClient/runStream.ts index 58302e1e32..e8336786a2 100644 --- a/packages/core/src/v3/apiClient/runStream.ts +++ b/packages/core/src/v3/apiClient/runStream.ts @@ -234,28 +234,28 @@ export class RunSubscription { }); // Check for stream metadata - if (run.metadata) { - for (const [key] of Object.entries(run.metadata)) { - if (key.startsWith("$$stream.")) { - const streamKey = key.replace("$$stream.", "") as keyof TStreams; - - if (!activeStreams.has(key)) { - activeStreams.add(key); - - const subscription = this.options.streamFactory.createSubscription( - run.id, - streamKey.toString(), - this.options.client?.baseUrl - ); - - await subscription.subscribe(async (chunk) => { - controller.enqueue({ - type: streamKey, - chunk: chunk as TStreams[typeof streamKey], - run, - } as StreamPartResult, TStreams>); - }); - } + if (run.metadata && "$$streams" in run.metadata && Array.isArray(run.metadata.$$streams)) { + for (const streamKey of run.metadata.$$streams) { + if (typeof streamKey !== "string") { + continue; + } + + if (!activeStreams.has(streamKey)) { + activeStreams.add(streamKey); + + const subscription = this.options.streamFactory.createSubscription( + run.id, + streamKey, + this.options.client?.baseUrl + ); + + await subscription.subscribe(async (chunk) => { + controller.enqueue({ + type: streamKey, + chunk: chunk as TStreams[typeof streamKey], + run, + } as StreamPartResult, TStreams>); + }); } } } diff --git a/packages/core/src/v3/runMetadata/index.ts b/packages/core/src/v3/runMetadata/index.ts index f594985006..16e5cf752e 100644 --- a/packages/core/src/v3/runMetadata/index.ts +++ b/packages/core/src/v3/runMetadata/index.ts @@ -58,7 +58,11 @@ export class RunMetadataAPI implements RunMetadataManager { } appendKey(key: string, value: DeserializedJson): void { - return this.#getManager().appendKey(key, value); + this.#getManager().appendKey(key, value); + } + + removeFromKey(key: string, value: DeserializedJson): void { + this.#getManager().removeFromKey(key, value); } public update(metadata: Record): void { diff --git a/packages/core/src/v3/runMetadata/manager.ts b/packages/core/src/v3/runMetadata/manager.ts index 5cf4a9daa2..922ae43218 100644 --- a/packages/core/src/v3/runMetadata/manager.ts +++ b/packages/core/src/v3/runMetadata/manager.ts @@ -6,6 +6,9 @@ import { RunMetadataManager } from "./types.js"; import { MetadataStream } from "./metadataStream.js"; import { ApiClient } from "../apiClient/index.js"; +const MAXIMUM_ACTIVE_STREAMS = 2; +const MAXIMUM_TOTAL_STREAMS = 5; + export class StandardMetadataManager implements RunMetadataManager { private flushTimeoutId: NodeJS.Timeout | null = null; private hasChanges: boolean = false; @@ -122,6 +125,40 @@ export class StandardMetadataManager implements RunMetadataManager { this.store = nextStore; } + public removeFromKey(key: string, value: DeserializedJson) { + if (!this.runId) { + return; + } + + let nextStore: Record | undefined = this.store + ? 
structuredClone(this.store) + : {}; + + if (key.startsWith("$.")) { + const path = new JSONHeroPath(key); + const currentValue = path.first(nextStore); + + if (Array.isArray(currentValue)) { + // Remove the value from array using deep equality check + const newArray = currentValue.filter((item) => !dequal(item, value)); + path.set(nextStore, newArray); + } + } else { + const currentValue = nextStore[key]; + + if (Array.isArray(currentValue)) { + // Remove the value from array using deep equality check + nextStore[key] = currentValue.filter((item) => !dequal(item, value)); + } + } + + if (!dequal(this.store, nextStore)) { + this.hasChanges = true; + } + + this.store = nextStore; + } + public incrementKey(key: string, increment: number = 1) { if (!this.runId) { return; @@ -173,9 +210,27 @@ export class StandardMetadataManager implements RunMetadataManager { return $value; } + // Check to make sure we haven't exceeded the max number of active streams + if (this.activeStreams.size >= MAXIMUM_ACTIVE_STREAMS) { + console.warn( + `Exceeded the maximum number of active streams (${MAXIMUM_ACTIVE_STREAMS}). The "${key}" stream will be ignored.` + ); + return $value; + } + + // Check to make sure we haven't exceeded the max number of total streams + const streams = (this.store?.$$streams ?? []) as string[]; + + if (streams.length >= MAXIMUM_TOTAL_STREAMS) { + console.warn( + `Exceeded the maximum number of total streams (${MAXIMUM_TOTAL_STREAMS}). The "${key}" stream will be ignored.` + ); + return $value; + } + try { // Add the key to the special stream metadata object - this.setKey(`$$stream.${key}`, key); + this.appendKey(`$$streams`, key); await this.flush(); diff --git a/packages/core/src/v3/runMetadata/noopManager.ts b/packages/core/src/v3/runMetadata/noopManager.ts index 753dcfb7b1..eb7937f886 100644 --- a/packages/core/src/v3/runMetadata/noopManager.ts +++ b/packages/core/src/v3/runMetadata/noopManager.ts @@ -6,6 +6,9 @@ export class NoopRunMetadataManager implements RunMetadataManager { appendKey(key: string, value: DeserializedJson): void { throw new Error("Method not implemented."); } + removeFromKey(key: string, value: DeserializedJson): void { + throw new Error("Method not implemented."); + } incrementKey(key: string, value: number): void { throw new Error("Method not implemented."); } diff --git a/packages/core/src/v3/runMetadata/types.ts b/packages/core/src/v3/runMetadata/types.ts index cfb7841c66..94133313da 100644 --- a/packages/core/src/v3/runMetadata/types.ts +++ b/packages/core/src/v3/runMetadata/types.ts @@ -9,6 +9,7 @@ export interface RunMetadataManager { setKey(key: string, value: DeserializedJson): void; deleteKey(key: string): void; appendKey(key: string, value: DeserializedJson): void; + removeFromKey(key: string, value: DeserializedJson): void; incrementKey(key: string, value: number): void; decrementKey(key: string, value: number): void; update(metadata: Record): void; diff --git a/packages/core/src/v3/utils/ioSerialization.ts b/packages/core/src/v3/utils/ioSerialization.ts index aa623d6d57..b369d02ffe 100644 --- a/packages/core/src/v3/utils/ioSerialization.ts +++ b/packages/core/src/v3/utils/ioSerialization.ts @@ -271,7 +271,7 @@ export async function createPacketAttributes( try { const parsed = parse(packet.data) as any; - const jsonified = JSON.parse(JSON.stringify(parsed, safeReplacer)); + const jsonified = JSON.parse(JSON.stringify(parsed, makeSafeReplacer())); const result = { ...flattenAttributes(jsonified, dataKey), @@ -319,7 +319,7 @@ export async function 
createPacketAttributesAsJson( const { deserialize } = await loadSuperJSON(); const deserialized = deserialize(data) as any; - const jsonify = safeJsonParse(JSON.stringify(deserialized, safeReplacer)); + const jsonify = safeJsonParse(JSON.stringify(deserialized, makeSafeReplacer())); return imposeAttributeLimits(flattenAttributes(jsonify, undefined)); case "application/store": @@ -329,7 +329,11 @@ export async function createPacketAttributesAsJson( } } -export async function prettyPrintPacket(rawData: any, dataType?: string): Promise { +export async function prettyPrintPacket( + rawData: any, + dataType?: string, + options?: ReplacerOptions +): Promise { if (rawData === undefined) { return ""; } @@ -347,42 +351,53 @@ export async function prettyPrintPacket(rawData: any, dataType?: string): Promis if (typeof rawData === "string") { rawData = safeJsonParse(rawData); } - return JSON.stringify(rawData, safeReplacer, 2); + return JSON.stringify(rawData, makeSafeReplacer(options), 2); } if (typeof rawData === "string") { return rawData; } - return JSON.stringify(rawData, safeReplacer, 2); + return JSON.stringify(rawData, makeSafeReplacer(options), 2); } -function safeReplacer(key: string, value: any) { - // If it is a BigInt - if (typeof value === "bigint") { - return value.toString(); // Convert to string - } +interface ReplacerOptions { + filteredKeys?: string[]; +} - // if it is a Regex - if (value instanceof RegExp) { - return value.toString(); // Convert to string - } +function makeSafeReplacer(options?: ReplacerOptions) { + return function replacer(key: string, value: any) { + // Check if the key should be filtered out + if (options?.filteredKeys?.includes(key)) { + return undefined; + } - // if it is a Set - if (value instanceof Set) { - return Array.from(value); // Convert to array - } + // If it is a BigInt + if (typeof value === "bigint") { + return value.toString(); + } - // if it is a Map, convert it to an object - if (value instanceof Map) { - const obj: Record = {}; - value.forEach((v, k) => { - obj[k] = v; - }); - return obj; - } + // if it is a Regex + if (value instanceof RegExp) { + return value.toString(); + } + + // if it is a Set + if (value instanceof Set) { + return Array.from(value); + } - return value; // Otherwise return the value as is + // if it is a Map, convert it to an object + if (value instanceof Map) { + const obj: Record = {}; + value.forEach((v, k) => { + obj[k] = v; + }); + return obj; + } + + return value; + }; } function getPacketExtension(outputType: string): string { diff --git a/packages/core/test/runStream.test.ts b/packages/core/test/runStream.test.ts index 50b1b2bfca..7cb1207d13 100644 --- a/packages/core/test/runStream.test.ts +++ b/packages/core/test/runStream.test.ts @@ -219,9 +219,7 @@ describe("RunSubscription", () => { status: "COMPLETED", }); }); -}); -describe("RunSubscription withStreams", () => { it("should handle stream data", async () => { const streamFactory = new TestStreamSubscriptionFactory(); @@ -246,7 +244,7 @@ describe("RunSubscription withStreams", () => { isTest: false, runTags: [], metadata: JSON.stringify({ - "$$stream.openai": "openai", + $$streams: ["openai"], }), metadataType: "application/json", }, @@ -312,7 +310,7 @@ describe("RunSubscription withStreams", () => { isTest: false, runTags: [], metadata: JSON.stringify({ - "$$stream.openai": "openai", + $$streams: ["openai"], }), metadataType: "application/json", }, @@ -331,7 +329,7 @@ describe("RunSubscription withStreams", () => { isTest: false, runTags: [], metadata: 
JSON.stringify({ - "$$stream.openai": "openai", + $$streams: ["openai"], }), metadataType: "application/json", }, @@ -410,8 +408,7 @@ describe("RunSubscription withStreams", () => { isTest: false, runTags: [], metadata: JSON.stringify({ - "$$stream.openai": "openai", - "$$stream.anthropic": "anthropic", + $$streams: ["openai", "anthropic"], }), metadataType: "application/json", }, @@ -494,7 +491,7 @@ describe("RunSubscription withStreams", () => { isTest: false, runTags: [], metadata: JSON.stringify({ - "$$stream.openai": "openai", + $$streams: ["openai"], }), metadataType: "application/json", }, @@ -513,8 +510,7 @@ describe("RunSubscription withStreams", () => { isTest: false, runTags: [], metadata: JSON.stringify({ - "$$stream.openai": "openai", - "$$stream.anthropic": "anthropic", + $$streams: ["openai", "anthropic"], }), metadataType: "application/json", }, @@ -534,8 +530,7 @@ describe("RunSubscription withStreams", () => { isTest: false, runTags: [], metadata: JSON.stringify({ - "$$stream.openai": "openai", - "$$stream.anthropic": "anthropic", + $$streams: ["openai", "anthropic"], }), metadataType: "application/json", }, diff --git a/packages/core/test/standardMetadataManager.test.ts b/packages/core/test/standardMetadataManager.test.ts index 072b6d3e35..b88bc7fc1a 100644 --- a/packages/core/test/standardMetadataManager.test.ts +++ b/packages/core/test/standardMetadataManager.test.ts @@ -204,4 +204,92 @@ describe("StandardMetadataManager", () => { manager.decrementKey("counter", 3); expect(manager.getKey("counter")).toBe(2); }); + + test("should remove value from array with simple key", () => { + // Setup initial array + manager.setKey("myList", ["first", "second", "third"]); + + // Remove a value + manager.removeFromKey("myList", "second"); + expect(manager.getKey("myList")).toEqual(["first", "third"]); + }); + + test("should remove value from array with JSON path", () => { + // Setup initial nested array + manager.setKey("nested", { items: ["first", "second", "third"] }); + + // Remove a value + manager.removeFromKey("$.nested.items", "second"); + expect(manager.current()).toEqual({ + nested: { + items: ["first", "third"], + }, + }); + }); + + test("should handle removing non-existent value", () => { + // Setup initial array + manager.setKey("myList", ["first", "second"]); + + // Try to remove non-existent value + manager.removeFromKey("myList", "third"); + expect(manager.getKey("myList")).toEqual(["first", "second"]); + }); + + test("should handle removing from non-array values", () => { + // Setup non-array value + manager.setKey("value", "string"); + + // Try to remove from non-array + manager.removeFromKey("value", "something"); + expect(manager.getKey("value")).toBe("string"); + }); + + test("should remove object from array using deep equality", () => { + // Setup array with objects + manager.setKey("objects", [ + { id: 1, name: "first" }, + { id: 2, name: "second" }, + { id: 3, name: "third" }, + ]); + + // Remove object + manager.removeFromKey("objects", { id: 2, name: "second" }); + expect(manager.getKey("objects")).toEqual([ + { id: 1, name: "first" }, + { id: 3, name: "third" }, + ]); + }); + + test("should trigger server update when removing from array", async () => { + // Setup initial array + manager.setKey("myList", ["first", "second", "third"]); + await manager.flush(); + expect(metadataUpdates).toHaveLength(1); + + // Remove value + manager.removeFromKey("myList", "second"); + await manager.flush(); + + expect(metadataUpdates).toHaveLength(2); + 
expect(metadataUpdates[1]).toEqual({ + metadata: { + myList: ["first", "third"], + }, + }); + }); + + test("should not trigger server update when removing non-existent value", async () => { + // Setup initial array + manager.setKey("myList", ["first", "second"]); + await manager.flush(); + expect(metadataUpdates).toHaveLength(1); + + // Try to remove non-existent value + manager.removeFromKey("myList", "third"); + await manager.flush(); + + // Should not trigger new update since nothing changed + expect(metadataUpdates).toHaveLength(1); + }); }); diff --git a/packages/trigger-sdk/src/v3/metadata.ts b/packages/trigger-sdk/src/v3/metadata.ts index 94cf5de15e..7f9e10f669 100644 --- a/packages/trigger-sdk/src/v3/metadata.ts +++ b/packages/trigger-sdk/src/v3/metadata.ts @@ -27,6 +27,7 @@ export const metadata = { flush: flushMetadata, stream: stream, append: appendMetadataKey, + remove: removeMetadataKey, increment: incrementMetadataKey, decrement: decrementMetadataKey, }; @@ -155,6 +156,21 @@ function appendMetadataKey(key: string, value: DeserializedJson) { runMetadata.appendKey(key, value); } +/** + * Removes a value from an array in the metadata of the current run. + * + * @param {string} key - The key of the array in metadata. + * @param {DeserializedJson} value - The value to remove from the array. + * + * @example + * + * metadata.remove("logs", "User logged in"); + * metadata.remove("events", { type: "click", timestamp: Date.now() }); + */ +function removeMetadataKey(key: string, value: DeserializedJson) { + runMetadata.removeFromKey(key, value); +} + /** * Flushes metadata to the Trigger.dev instance * From 686f47d5347c2d3091dadc4786d102239f987566 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Tue, 19 Nov 2024 11:34:31 +0000 Subject: [PATCH 29/31] Streams can now have up to 2500 entries --- .../app/services/realtimeStreams.server.ts | 2 +- pnpm-lock.yaml | 126 ++++++++++++++++-- references/nextjs-realtime/package.json | 4 +- references/nextjs-realtime/src/trigger/ai.ts | 30 +++++ 4 files changed, 147 insertions(+), 15 deletions(-) diff --git a/apps/webapp/app/services/realtimeStreams.server.ts b/apps/webapp/app/services/realtimeStreams.server.ts index c1b64ea314..532fc1762d 100644 --- a/apps/webapp/app/services/realtimeStreams.server.ts +++ b/apps/webapp/app/services/realtimeStreams.server.ts @@ -134,7 +134,7 @@ export class RealtimeStreams { if (line.trim()) { // Avoid unnecessary parsing; assume 'line' is already a JSON string // Add XADD command with MAXLEN option to limit stream size - batchCommands.push([streamKey, "MAXLEN", "~", "1000", "*", "data", line]); + batchCommands.push([streamKey, "MAXLEN", "~", "2500", "*", "data", line]); if (batchCommands.length >= batchSize) { // Send batch using a pipeline diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 19b844cccf..2579f3d512 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1358,7 +1358,7 @@ importers: version: 4.0.14 ai: specifier: ^3.4.33 - version: 3.4.33(openai@4.68.4)(react@18.3.1)(svelte@4.2.19)(vue@3.4.38)(zod@3.22.3) + version: 3.4.33(react@18.3.1)(svelte@4.2.19)(vue@3.4.38)(zod@3.22.3) defu: specifier: ^6.1.4 version: 6.1.4 @@ -1532,7 +1532,7 @@ importers: version: 8.5.4 ai: specifier: ^3.4.33 - version: 3.4.33(openai@4.68.4)(react@18.3.1)(svelte@4.2.19)(vue@3.4.38)(zod@3.22.3) + version: 3.4.33(react@18.3.1)(svelte@4.2.19)(vue@3.4.38)(zod@3.22.3) encoding: specifier: ^0.1.13 version: 0.1.13 @@ -1602,8 +1602,8 @@ importers: references/nextjs-realtime: dependencies: '@ai-sdk/openai': - specifier: ^0.0.72 - version: 
0.0.72(zod@3.22.3) + specifier: ^1.0.1 + version: 1.0.1(zod@3.22.3) '@fal-ai/serverless-client': specifier: ^0.15.0 version: 0.15.0 @@ -1629,8 +1629,8 @@ importers: specifier: ^7.0.3 version: 7.0.3(next@14.2.15)(react@18.3.1)(uploadthing@7.1.0) ai: - specifier: ^3.4.33 - version: 3.4.33(openai@4.68.4)(react@18.3.1)(svelte@4.2.19)(vue@3.4.38)(zod@3.22.3) + specifier: ^4.0.0 + version: 4.0.0(react@18.3.1)(zod@3.22.3) class-variance-authority: specifier: ^0.7.0 version: 0.7.0 @@ -1902,14 +1902,14 @@ packages: resolution: {integrity: sha512-Ff9+ksdQQB3rMncgqDK78uLznstjyfIf2Arnh22pW8kBpLs6rpKDwgnZT46hin5Hl1WzazzK64DOrhSwYpS7bQ==} dev: false - /@ai-sdk/openai@0.0.72(zod@3.22.3): - resolution: {integrity: sha512-IKsgxIt6KJGkEHyMp975xW5VPmetwhI8g9H6dDmwvemBB41IRQa78YMNttiJqPcgmrZX2QfErOICv1gQvZ1gZg==} + /@ai-sdk/openai@1.0.1(zod@3.22.3): + resolution: {integrity: sha512-snZge8457afWlosVNUn+BG60MrxAPOOm3zmIMxJZih8tneNSiRbTVCbSzAtq/9vsnOHDe5RR83PRl85juOYEnA==} engines: {node: '>=18'} peerDependencies: zod: ^3.0.0 dependencies: - '@ai-sdk/provider': 0.0.26 - '@ai-sdk/provider-utils': 1.0.22(zod@3.22.3) + '@ai-sdk/provider': 1.0.0 + '@ai-sdk/provider-utils': 2.0.0(zod@3.22.3) zod: 3.22.3 dev: false @@ -1943,6 +1943,23 @@ packages: nanoid: 3.3.7 secure-json-parse: 2.7.0 zod: 3.22.3 + dev: true + + /@ai-sdk/provider-utils@2.0.0(zod@3.22.3): + resolution: {integrity: sha512-uITgVJByhtzuQU2ZW+2CidWRmQqTUTp6KADevy+4aRnmILZxY2LCt+UZ/ZtjJqq0MffwkuQPPY21ExmFAQ6kKA==} + engines: {node: '>=18'} + peerDependencies: + zod: ^3.0.0 + peerDependenciesMeta: + zod: + optional: true + dependencies: + '@ai-sdk/provider': 1.0.0 + eventsource-parser: 3.0.0 + nanoid: 5.0.8 + secure-json-parse: 2.7.0 + zod: 3.22.3 + dev: false /@ai-sdk/provider@0.0.22: resolution: {integrity: sha512-smZ1/2jL/JSKnbhC6ama/PxI2D/psj+YAe0c0qpd5ComQCNFltg72VFf0rpUSFMmFuj1pCCNoBOCrvyl8HTZHQ==} @@ -1956,6 +1973,14 @@ packages: engines: {node: '>=18'} dependencies: json-schema: 0.4.0 + dev: true + + /@ai-sdk/provider@1.0.0: + resolution: {integrity: sha512-Sj29AzooJ7SYvhPd+AAWt/E7j63E9+AzRnoMHUaJPRYzOd/WDrVNxxv85prF9gDcQ7XPVlSk9j6oAZV9/DXYpA==} + engines: {node: '>=18'} + dependencies: + json-schema: 0.4.0 + dev: false /@ai-sdk/react@0.0.53(react@19.0.0-rc.0)(zod@3.22.3): resolution: {integrity: sha512-sIsmTFoR/QHvUUkltmHwP4bPjwy2vko6j/Nj8ayxLhEHs04Ug+dwXQyfA7MwgimEE3BcDQpWL8ikVj0m3ZILWQ==} @@ -1994,6 +2019,27 @@ packages: swr: 2.2.5(react@18.3.1) throttleit: 2.1.0 zod: 3.22.3 + dev: true + + /@ai-sdk/react@1.0.0(react@18.3.1)(zod@3.22.3): + resolution: {integrity: sha512-BDrZqQA07Btg64JCuhFvBgYV+tt2B8cXINzEqWknGoxqcwgdE8wSLG2gkXoLzyC2Rnj7oj0HHpOhLUxDCmoKZg==} + engines: {node: '>=18'} + peerDependencies: + react: ^18 || ^19 || ^19.0.0-rc + zod: ^3.0.0 + peerDependenciesMeta: + react: + optional: true + zod: + optional: true + dependencies: + '@ai-sdk/provider-utils': 2.0.0(zod@3.22.3) + '@ai-sdk/ui-utils': 1.0.0(zod@3.22.3) + react: 18.3.1 + swr: 2.2.5(react@18.3.1) + throttleit: 2.1.0 + zod: 3.22.3 + dev: false /@ai-sdk/solid@0.0.43(zod@3.22.3): resolution: {integrity: sha512-7PlPLaeMAu97oOY2gjywvKZMYHF+GDfUxYNcuJ4AZ3/MRBatzs/U2r4ClT1iH8uMOcMg02RX6UKzP5SgnUBjVw==} @@ -2023,6 +2069,7 @@ packages: '@ai-sdk/ui-utils': 0.0.50(zod@3.22.3) transitivePeerDependencies: - zod + dev: true /@ai-sdk/svelte@0.0.45(svelte@4.2.19)(zod@3.22.3): resolution: {integrity: sha512-w5Sdl0ArFIM3Fp8BbH4TUvlrS84WP/jN/wC1+fghMOXd7ceVO3Yhs9r71wTqndhgkLC7LAEX9Ll7ZEPfW9WBDA==} @@ -2056,6 +2103,7 @@ packages: svelte: 4.2.19 transitivePeerDependencies: - zod + dev: true 
/@ai-sdk/ui-utils@0.0.40(zod@3.22.3): resolution: {integrity: sha512-f0eonPUBO13pIO8jA9IGux7IKMeqpvWK22GBr3tOoSRnO5Wg5GEpXZU1V0Po+unpeZHyEPahrWbj5JfXcyWCqw==} @@ -2089,6 +2137,22 @@ packages: secure-json-parse: 2.7.0 zod: 3.22.3 zod-to-json-schema: 3.23.5(zod@3.22.3) + dev: true + + /@ai-sdk/ui-utils@1.0.0(zod@3.22.3): + resolution: {integrity: sha512-oXBDIM/0niWeTWyw77RVl505dNxBUDLLple7bTsqo2d3i1UKwGlzBUX8XqZsh7GbY7I6V05nlG0Y8iGlWxv1Aw==} + engines: {node: '>=18'} + peerDependencies: + zod: ^3.0.0 + peerDependenciesMeta: + zod: + optional: true + dependencies: + '@ai-sdk/provider': 1.0.0 + '@ai-sdk/provider-utils': 2.0.0(zod@3.22.3) + zod: 3.22.3 + zod-to-json-schema: 3.23.5(zod@3.22.3) + dev: false /@ai-sdk/vue@0.0.45(vue@3.4.38)(zod@3.22.3): resolution: {integrity: sha512-bqeoWZqk88TQmfoPgnFUKkrvhOIcOcSH5LMPgzZ8XwDqz5tHHrMHzpPfHCj7XyYn4ROTFK/2kKdC/ta6Ko0fMw==} @@ -2122,6 +2186,7 @@ packages: vue: 3.4.38(typescript@5.5.4) transitivePeerDependencies: - zod + dev: true /@alloc/quick-lru@5.2.0: resolution: {integrity: sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==} @@ -15453,6 +15518,7 @@ packages: dependencies: '@types/node': 18.19.20 form-data: 3.0.1 + dev: false /@types/node-forge@1.3.10: resolution: {integrity: sha512-y6PJDYN4xYBxwd22l+OVH35N+1fCYWiuC3aiP2SlXVE6Lo7SS+rSx9r89hLxrP4pn6n1lBGhHJ12pj3F3Mpttw==} @@ -16567,6 +16633,7 @@ packages: engines: {node: '>= 8.0.0'} dependencies: humanize-ms: 1.2.1 + dev: false /aggregate-error@3.1.0: resolution: {integrity: sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==} @@ -16631,7 +16698,7 @@ packages: - vue dev: false - /ai@3.4.33(openai@4.68.4)(react@18.3.1)(svelte@4.2.19)(vue@3.4.38)(zod@3.22.3): + /ai@3.4.33(react@18.3.1)(svelte@4.2.19)(vue@3.4.38)(zod@3.22.3): resolution: {integrity: sha512-plBlrVZKwPoRTmM8+D1sJac9Bq8eaa2jiZlHLZIWekKWI1yMWYZvCCEezY9ASPwRhULYDJB2VhKOBUUeg3S5JQ==} engines: {node: '>=18'} peerDependencies: @@ -16663,7 +16730,6 @@ packages: eventsource-parser: 1.1.2 json-schema: 0.4.0 jsondiffpatch: 0.6.0 - openai: 4.68.4(zod@3.22.3) react: 18.3.1 secure-json-parse: 2.7.0 svelte: 4.2.19 @@ -16672,6 +16738,30 @@ packages: transitivePeerDependencies: - solid-js - vue + dev: true + + /ai@4.0.0(react@18.3.1)(zod@3.22.3): + resolution: {integrity: sha512-cqf2GCaXnOPhUU+Ccq6i+5I0jDjnFkzfq7t6mc0SUSibSa1wDPn5J4p8+Joh2fDGDYZOJ44rpTW9hSs40rXNAw==} + engines: {node: '>=18'} + peerDependencies: + react: ^18 || ^19 || ^19.0.0-rc + zod: ^3.0.0 + peerDependenciesMeta: + react: + optional: true + zod: + optional: true + dependencies: + '@ai-sdk/provider': 1.0.0 + '@ai-sdk/provider-utils': 2.0.0(zod@3.22.3) + '@ai-sdk/react': 1.0.0(react@18.3.1)(zod@3.22.3) + '@ai-sdk/ui-utils': 1.0.0(zod@3.22.3) + '@opentelemetry/api': 1.9.0 + jsondiffpatch: 0.6.0 + react: 18.3.1 + zod: 3.22.3 + zod-to-json-schema: 3.23.5(zod@3.22.3) + dev: false /ajv-formats@2.1.1(ajv@8.12.0): resolution: {integrity: sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==} @@ -20938,6 +21028,7 @@ packages: /form-data-encoder@1.7.2: resolution: {integrity: sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A==} + dev: false /form-data@2.3.3: resolution: {integrity: sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==} @@ -20984,6 +21075,7 @@ packages: dependencies: node-domexception: 1.0.0 web-streams-polyfill: 4.0.0-beta.3 + dev: false /formdata-polyfill@4.0.10: 
resolution: {integrity: sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==} @@ -21843,6 +21935,7 @@ packages: resolution: {integrity: sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==} dependencies: ms: 2.1.3 + dev: false /hyphenate-style-name@1.0.4: resolution: {integrity: sha512-ygGZLjmXfPHj+ZWh6LwbC37l43MhfztxetbFCoYTM2VjkIUpeHgSNn7QIyVFj7YQ1Wl9Cbw5sholVJPzWvC2MQ==} @@ -23976,6 +24069,12 @@ packages: hasBin: true dev: false + /nanoid@5.0.8: + resolution: {integrity: sha512-TcJPw+9RV9dibz1hHUzlLVy8N4X9TnwirAjrU08Juo6BNKggzVfP2ZJ/3ZUSq15Xl5i85i+Z89XBO90pB2PghQ==} + engines: {node: ^18 || >=20} + hasBin: true + dev: false + /natural-compare-lite@1.4.0: resolution: {integrity: sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g==} dev: true @@ -24137,6 +24236,7 @@ packages: /node-domexception@1.0.0: resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==} engines: {node: '>=10.5.0'} + dev: false /node-emoji@1.11.0: resolution: {integrity: sha512-wo2DpQkQp7Sjm2A0cq+sN7EHKO6Sl0ctXeBdFZrL9T9+UywORbufTcTZxom8YqpLQt/FqNMUkOpkZrJVYSKD3A==} @@ -24630,6 +24730,7 @@ packages: zod: 3.22.3 transitivePeerDependencies: - encoding + dev: false /opener@1.5.2: resolution: {integrity: sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A==} @@ -30797,6 +30898,7 @@ packages: /web-streams-polyfill@4.0.0-beta.3: resolution: {integrity: sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==} engines: {node: '>= 14'} + dev: false /webidl-conversions@3.0.1: resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} diff --git a/references/nextjs-realtime/package.json b/references/nextjs-realtime/package.json index 7e2c7257d4..9fa6429803 100644 --- a/references/nextjs-realtime/package.json +++ b/references/nextjs-realtime/package.json @@ -10,7 +10,7 @@ "dev:trigger": "trigger dev" }, "dependencies": { - "@ai-sdk/openai": "^0.0.72", + "@ai-sdk/openai": "^1.0.1", "@fal-ai/serverless-client": "^0.15.0", "@radix-ui/react-dialog": "^1.0.3", "@radix-ui/react-icons": "^1.3.0", @@ -19,7 +19,7 @@ "@trigger.dev/react-hooks": "workspace:^3", "@trigger.dev/sdk": "workspace:^3", "@uploadthing/react": "^7.0.3", - "ai": "^3.4.33", + "ai": "^4.0.0", "class-variance-authority": "^0.7.0", "clsx": "^2.1.1", "lucide-react": "^0.451.0", diff --git a/references/nextjs-realtime/src/trigger/ai.ts b/references/nextjs-realtime/src/trigger/ai.ts index 935171cac8..776df9b63a 100644 --- a/references/nextjs-realtime/src/trigger/ai.ts +++ b/references/nextjs-realtime/src/trigger/ai.ts @@ -114,3 +114,33 @@ export const openaiStreaming = schemaTask({ return { text }; }, }); + +export const openaiO1Model = schemaTask({ + id: "openai-o1-model", + description: "Stream data from OpenAI to get the weather", + schema: z.object({ + model: z.string().default("o1-preview"), + prompt: z.string().default("Hello, how are you?"), + }), + run: async ({ model, prompt }) => { + logger.info("Running OpenAI model", { model, prompt }); + + const result = await streamText({ + model: openai(model), + prompt, + experimental_continueSteps: true, + }); + + const stream = await metadata.stream("openai", result.textStream); + + let text = ""; + + for await (const chunk of stream) { + logger.log("Received chunk", { chunk }); + + text += 
chunk; + } + + return { text }; + }, +}); From 841849831e8a7996d22d90c09e530614c909e0d4 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Tue, 19 Nov 2024 13:00:50 +0000 Subject: [PATCH 30/31] Various coderabbit fixes --- .../routes/api.v1.tasks.$taskId.trigger.ts | 2 +- .../app/routes/realtime.v1.streams.test.ts | 43 ------------- .../app/services/realtimeStreams.server.ts | 11 +++- apps/webapp/server.ts | 64 +++---------------- packages/core/src/v3/apiClient/runStream.ts | 4 ++ packages/core/src/v3/runMetadata/manager.ts | 2 +- packages/core/src/v3/workers/taskExecutor.ts | 2 +- 7 files changed, 23 insertions(+), 105 deletions(-) delete mode 100644 apps/webapp/app/routes/realtime.v1.streams.test.ts diff --git a/apps/webapp/app/routes/api.v1.tasks.$taskId.trigger.ts b/apps/webapp/app/routes/api.v1.tasks.$taskId.trigger.ts index a88100557a..8e0df325ac 100644 --- a/apps/webapp/app/routes/api.v1.tasks.$taskId.trigger.ts +++ b/apps/webapp/app/routes/api.v1.tasks.$taskId.trigger.ts @@ -97,7 +97,7 @@ const { action, loader } = createActionApiRoute( } else if (error instanceof OutOfEntitlementError) { return json({ error: error.message }, { status: 422 }); } else if (error instanceof Error) { - return json({ error: error.message }, { status: 400 }); + return json({ error: error.message }, { status: 500 }); } return json({ error: "Something went wrong" }, { status: 500 }); diff --git a/apps/webapp/app/routes/realtime.v1.streams.test.ts b/apps/webapp/app/routes/realtime.v1.streams.test.ts deleted file mode 100644 index 6035b78e7d..0000000000 --- a/apps/webapp/app/routes/realtime.v1.streams.test.ts +++ /dev/null @@ -1,43 +0,0 @@ -import { ActionFunctionArgs } from "@remix-run/server-runtime"; - -export async function action({ request }: ActionFunctionArgs) { - if (!request.body) { - return new Response("No body provided", { status: 400 }); - } - - const reader = request.body.getReader(); - const decoder = new TextDecoder(); - let buffer = ""; - - try { - while (true) { - const { done, value } = await reader.read(); - - if (done) { - if (buffer) { - const data = JSON.parse(buffer); - console.log(`${new Date().toISOString()} Received data at end:`, data); - // You can process the data as needed - } - break; - } - - buffer += decoder.decode(value, { stream: true }); - const lines = buffer.split("\n"); - buffer = lines.pop() || ""; - - for (const line of lines) { - if (line.trim()) { - const data = JSON.parse(line); - console.log(`${new Date().toISOString()} Received data:`, data); - // You can process each data chunk as needed - } - } - } - - return new Response(null, { status: 200 }); - } catch (error) { - console.error("Error processing stream:", error); - return new Response(null, { status: 500 }); - } -} diff --git a/apps/webapp/app/services/realtimeStreams.server.ts b/apps/webapp/app/services/realtimeStreams.server.ts index 532fc1762d..73ee9ca180 100644 --- a/apps/webapp/app/services/realtimeStreams.server.ts +++ b/apps/webapp/app/services/realtimeStreams.server.ts @@ -59,14 +59,18 @@ export class RealtimeStreams { } catch (error) { if (signal.aborted) break; - console.error("Error reading from Redis stream:", error); + logger.error("[RealtimeStreams][streamResponse] Error reading from Redis stream:", { + error, + }); retryCount++; if (retryCount >= maxRetries) throw error; await new Promise((resolve) => setTimeout(resolve, 1000 * retryCount)); } } } catch (error) { - console.error("Fatal error in stream processing:", error); + logger.error("[RealtimeStreams][streamResponse] Fatal error in stream 
processing:", { + error, + }); controller.error(error); } finally { await cleanup(); @@ -163,7 +167,8 @@ export class RealtimeStreams { return new Response(null, { status: 200 }); } catch (error) { - console.error("Error in ingestData:", error); + logger.error("[RealtimeStreams][ingestData] Error in ingestData:", { error }); + return new Response(null, { status: 500 }); } finally { await cleanup(); diff --git a/apps/webapp/server.ts b/apps/webapp/server.ts index b762aa4740..aec725c801 100644 --- a/apps/webapp/server.ts +++ b/apps/webapp/server.ts @@ -14,9 +14,9 @@ import { RegistryProxy } from "~/v3/registryProxy.server"; const app = express(); -// if (process.env.DISABLE_COMPRESSION !== "1") { -// app.use(compression()); -// } +if (process.env.DISABLE_COMPRESSION !== "1") { + app.use(compression()); +} // http://expressjs.com/en/advanced/best-practice-security.html#at-a-minimum-disable-x-powered-by-header app.disable("x-powered-by"); @@ -73,63 +73,15 @@ if (process.env.HTTP_SERVER_DISABLED !== "true") { next(); }); - app.post("/realtime/v1/streams/express/test", async (req, res) => { - // Ensure the request is a readable stream - const { method, headers } = req; - console.log("Inside /realtime/v1/streams/express/test"); - - if (method !== "POST") { - return res.status(405).send("Method Not Allowed"); - } + app.use((req, res, next) => { + // Generate a unique request ID for each request + const requestId = nanoid(); - // Set encoding to UTF-8 to read string data - req.setEncoding("utf8"); - - let buffer = ""; - - try { - req.on("data", (chunk) => { - buffer += chunk; - const lines = buffer.split("\n"); - buffer = lines.pop() || ""; - - for (const line of lines) { - if (line.trim()) { - const data = JSON.parse(line); - console.log(`${new Date().toISOString()} Received data:`, data); - // You can process each data chunk as needed - } - } - }); - - req.on("end", () => { - if (buffer) { - const data = JSON.parse(buffer); - console.log(`${new Date().toISOString()} Received data at end:`, data); - // You can process the remaining data as needed - } - res.status(200).send(); // Send a success response - }); - - req.on("error", (error) => { - console.error("Error processing stream:", error); - res.status(500).send("Internal Server Error"); - }); - } catch (error) { - console.error("Error processing stream:", error); - res.status(500).send("Internal Server Error"); - } + runWithHttpContext({ requestId, path: req.url, host: req.hostname, method: req.method }, next); }); - // app.use((req, res, next) => { - // // Generate a unique request ID for each request - // const requestId = nanoid(); - - // runWithHttpContext({ requestId, path: req.url, host: req.hostname, method: req.method }, next); - // }); - if (process.env.DASHBOARD_AND_API_DISABLED !== "true") { - // app.use(apiRateLimiter); + app.use(apiRateLimiter); app.all( "*", diff --git a/packages/core/src/v3/apiClient/runStream.ts b/packages/core/src/v3/apiClient/runStream.ts index e8336786a2..d25900595b 100644 --- a/packages/core/src/v3/apiClient/runStream.ts +++ b/packages/core/src/v3/apiClient/runStream.ts @@ -147,6 +147,10 @@ export class SSEStreamSubscriptionFactory implements StreamSubscriptionFactory { ) {} createSubscription(runId: string, streamKey: string, baseUrl?: string): StreamSubscription { + if (!runId || !streamKey) { + throw new Error("runId and streamKey are required"); + } + const url = `${baseUrl ?? 
this.baseUrl}/realtime/v1/streams/${runId}/${streamKey}`; return new SSEStreamSubscription(url, this.options); } diff --git a/packages/core/src/v3/runMetadata/manager.ts b/packages/core/src/v3/runMetadata/manager.ts index 922ae43218..53bdb45759 100644 --- a/packages/core/src/v3/runMetadata/manager.ts +++ b/packages/core/src/v3/runMetadata/manager.ts @@ -260,7 +260,7 @@ export class StandardMetadataManager implements RunMetadataManager { } // Waits for all the streams to finish - public async waitForAllStreams(timeout: number = 30_000): Promise { + public async waitForAllStreams(timeout: number = 60_000): Promise { if (this.activeStreams.size === 0) { return; } diff --git a/packages/core/src/v3/workers/taskExecutor.ts b/packages/core/src/v3/workers/taskExecutor.ts index 6987fe8d80..06f965c35c 100644 --- a/packages/core/src/v3/workers/taskExecutor.ts +++ b/packages/core/src/v3/workers/taskExecutor.ts @@ -503,7 +503,7 @@ export class TaskExecutor { return this._tracer.startActiveSpan( "waitUntil", async (span) => { - return await waitUntil.blockUntilSettled(30_000); + return await waitUntil.blockUntilSettled(60_000); }, { attributes: { From 722bb45132ca95c678d155dddf00d7a8ac960211 Mon Sep 17 00:00:00 2001 From: Eric Allam Date: Tue, 19 Nov 2024 13:09:45 +0000 Subject: [PATCH 31/31] additional react-hooks jsdocs --- .../react-hooks/src/hooks/useApiClient.ts | 26 +++++++ packages/react-hooks/src/hooks/useRealtime.ts | 54 ++++++++++--- .../react-hooks/src/hooks/useTaskTrigger.ts | 75 +++++++++++++++++++ 3 files changed, 146 insertions(+), 9 deletions(-) diff --git a/packages/react-hooks/src/hooks/useApiClient.ts b/packages/react-hooks/src/hooks/useApiClient.ts index 10914db305..21573521b0 100644 --- a/packages/react-hooks/src/hooks/useApiClient.ts +++ b/packages/react-hooks/src/hooks/useApiClient.ts @@ -3,12 +3,38 @@ import { ApiClient, ApiRequestOptions } from "@trigger.dev/core/v3"; import { useTriggerAuthContextOptional } from "../contexts.js"; +/** + * Configuration options for creating an API client instance. + */ export type UseApiClientOptions = { + /** Optional access token for authentication */ accessToken?: string; + /** Optional base URL for the API endpoints */ baseURL?: string; + /** Optional additional request configuration */ requestOptions?: ApiRequestOptions; }; +/** + * Hook to create an API client instance using authentication context or provided options. + * + * @param {UseApiClientOptions} [options] - Configuration options for the API client + * @returns {ApiClient} An initialized API client instance + * @throws {Error} When no access token is available in either context or options + * + * @example + * ```ts + * // Using context authentication + * const apiClient = useApiClient(); + * + * // Using custom options + * const apiClient = useApiClient({ + * accessToken: "your-access-token", + * baseURL: "https://api.my-trigger.com", + * requestOptions: { retry: { maxAttempts: 10 } } + * }); + * ``` + */ export function useApiClient(options?: UseApiClientOptions): ApiClient { const auth = useTriggerAuthContextOptional(); diff --git a/packages/react-hooks/src/hooks/useRealtime.ts b/packages/react-hooks/src/hooks/useRealtime.ts index 20d96020d5..cc9283f515 100644 --- a/packages/react-hooks/src/hooks/useRealtime.ts +++ b/packages/react-hooks/src/hooks/useRealtime.ts @@ -24,11 +24,12 @@ export type UseRealtimeRunInstance = { }; /** - * hook to subscribe to realtime updates of a task run. + * Hook to subscribe to realtime updates of a task run. 
* - * @template TTask - The type of the task. - * @param {string} runId - The unique identifier of the run to subscribe to. - * @returns {{ run: RealtimeRun | undefined, error: Error | null }} An object containing the current state of the run and any error encountered. + * @template TTask - The type of the task + * @param {string} [runId] - The unique identifier of the run to subscribe to + * @param {UseRealtimeRunOptions} [options] - Configuration options for the subscription + * @returns {UseRealtimeRunInstance} An object containing the current state of the run, error handling, and control methods * * @example * ```ts @@ -36,6 +37,7 @@ export type UseRealtimeRunInstance = { * const { run, error } = useRealtimeRun('run-id-123'); * ``` */ + export function useRealtimeRun( runId?: string, options?: UseRealtimeRunOptions @@ -133,6 +135,23 @@ export type UseRealtimeRunWithStreamsInstance< stop: () => void; }; +/** + * Hook to subscribe to realtime updates of a task run with associated data streams. + * + * @template TTask - The type of the task + * @template TStreams - The type of the streams data + * @param {string} [runId] - The unique identifier of the run to subscribe to + * @param {UseRealtimeRunOptions} [options] - Configuration options for the subscription + * @returns {UseRealtimeRunWithStreamsInstance} An object containing the current state of the run, streams data, and error handling + * + * @example + * ```ts + * import type { myTask } from './path/to/task'; + * const { run, streams, error } = useRealtimeRunWithStreams('run-id-123'); + * ``` + */ export function useRealtimeRunWithStreams< TTask extends AnyTask = AnyTask, TStreams extends Record = Record, @@ -249,6 +268,22 @@ export type UseRealtimeRunsInstance = { stop: () => void; }; +/** + * Hook to subscribe to realtime updates of task runs filtered by tag(s). + * + * @template TTask - The type of the task + * @param {string | string[]} tag - The tag or array of tags to filter runs by + * @param {UseRealtimeRunOptions} [options] - Configuration options for the subscription + * @returns {UseRealtimeRunsInstance} An object containing the current state of the runs and any error encountered + * + * @example + * ```ts + * import type { myTask } from './path/to/task'; + * const { runs, error } = useRealtimeRunsWithTag('my-tag'); + * // Or with multiple tags + * const { runs, error } = useRealtimeRunsWithTag(['tag1', 'tag2']); + * ``` + */ export function useRealtimeRunsWithTag( tag: string | string[], options?: UseRealtimeRunOptions @@ -321,19 +356,20 @@ export function useRealtimeRunsWithTag( } /** - * hook to subscribe to realtime updates of a batch of task runs. + * Hook to subscribe to realtime updates of a batch of task runs. * - * @template TTask - The type of the task. - * @param {string} batchId - The unique identifier of the batch to subscribe to. - * @returns {{ runs: RealtimeRun[], error: Error | null }} An object containing the current state of the runs and any error encountered. 
+ * @template TTask - The type of the task + * @param {string} batchId - The unique identifier of the batch to subscribe to + * @param {UseRealtimeRunOptions} [options] - Configuration options for the subscription + * @returns {UseRealtimeRunsInstance} An object containing the current state of the runs, error handling, and control methods * * @example - * * ```ts * import type { myTask } from './path/to/task'; * const { runs, error } = useRealtimeBatch('batch-id-123'); * ``` */ + export function useRealtimeBatch( batchId: string, options?: UseRealtimeRunOptions diff --git a/packages/react-hooks/src/hooks/useTaskTrigger.ts b/packages/react-hooks/src/hooks/useTaskTrigger.ts index 8d5e50e321..98524dfd72 100644 --- a/packages/react-hooks/src/hooks/useTaskTrigger.ts +++ b/packages/react-hooks/src/hooks/useTaskTrigger.ts @@ -19,15 +19,41 @@ import { UseRealtimeRunWithStreamsInstance, } from "./useRealtime.js"; +/** + * Base interface for task trigger instances. + * + * @template TTask - The type of the task + */ export interface TriggerInstance { + /** Function to submit the task with a payload */ submit: (payload: TaskPayload) => void; + /** Whether the task is currently being submitted */ isLoading: boolean; + /** The handle returned after successful task submission */ handle?: RunHandleFromTypes>; + /** Any error that occurred during submission */ error?: Error; } export type UseTaskTriggerOptions = UseApiClientOptions; +/** + * Hook to trigger a task and manage its initial execution state. + * + * @template TTask - The type of the task + * @param {TaskIdentifier} id - The identifier of the task to trigger + * @param {UseTaskTriggerOptions} [options] - Configuration options for the task trigger + * @returns {TriggerInstance} An object containing the submit function, loading state, handle, and any errors + * + * @example + * ```ts + * import type { myTask } from './path/to/task'; + * const { submit, isLoading, handle, error } = useTaskTrigger('my-task-id'); + * + * // Submit the task with payload + * submit({ foo: 'bar' }); + * ``` + */ export function useTaskTrigger( id: TaskIdentifier, options?: UseTaskTriggerOptions @@ -74,8 +100,13 @@ export function useTaskTrigger( }; } +/** + * Configuration options for task triggers with realtime updates. + */ export type UseRealtimeTaskTriggerOptions = UseTaskTriggerOptions & { + /** Whether the realtime subscription is enabled */ enabled?: boolean; + /** Optional throttle time in milliseconds for stream updates */ experimental_throttleInMs?: number; }; @@ -88,6 +119,28 @@ export type RealtimeTriggerInstanceWithStreams< handle?: RunHandleFromTypes>; }; +/** + * Hook to trigger a task and subscribe to its realtime updates including stream data. 
+ * + * @template TTask - The type of the task + * @template TStreams - The type of the streams data + * @param {TaskIdentifier} id - The identifier of the task to trigger + * @param {UseRealtimeTaskTriggerOptions} [options] - Configuration options for the task trigger and realtime updates + * @returns {RealtimeTriggerInstanceWithStreams} An object containing the submit function, loading state, + * handle, run state, streams data, and error handling + * + * @example + * ```ts + * import type { myTask } from './path/to/task'; + * const { submit, run, streams, error } = useRealtimeTaskTriggerWithStreams< + * typeof myTask, + * { output: string } + * >('my-task-id'); + * + * // Submit and monitor the task with streams + * submit({ foo: 'bar' }); + * ``` + */ export function useRealtimeTaskTriggerWithStreams< TTask extends AnyTask, TStreams extends Record = Record, @@ -114,6 +167,28 @@ export type RealtimeTriggerInstance = UseRealtimeRunInsta handle?: RunHandleFromTypes>; }; +/** + * Hook to trigger a task and subscribe to its realtime updates. + * + * @template TTask - The type of the task + * @param {TaskIdentifier} id - The identifier of the task to trigger + * @param {UseRealtimeTaskTriggerOptions} [options] - Configuration options for the task trigger and realtime updates + * @returns {RealtimeTriggerInstance} An object containing the submit function, loading state, + * handle, run state, and error handling + * + * @example + * ```ts + * import type { myTask } from './path/to/task'; + * const { submit, run, error, stop } = useRealtimeTaskTrigger('my-task-id'); + * + * // Submit and monitor the task + * submit({ foo: 'bar' }); + * + * // Stop monitoring when needed + * stop(); + * ``` + */ + export function useRealtimeTaskTrigger( id: TaskIdentifier, options?: UseRealtimeTaskTriggerOptions