diff --git a/.changeset/brave-pots-hug.md b/.changeset/brave-pots-hug.md deleted file mode 100644 index 4b8ea7fc1..000000000 --- a/.changeset/brave-pots-hug.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"server": patch ---- - -feat: chat scoped key access to mcp server diff --git a/.changeset/empty-birds-work.md b/.changeset/empty-birds-work.md deleted file mode 100644 index beb5c6922..000000000 --- a/.changeset/empty-birds-work.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"server": patch ---- - -fix: ensure system env compilation is case sensitive diff --git a/.changeset/famous-trams-lick.md b/.changeset/famous-trams-lick.md deleted file mode 100644 index 7e2ca3904..000000000 --- a/.changeset/famous-trams-lick.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"server": patch ---- - -ensure function oauth is respected in install page diff --git a/.changeset/four-poets-promise.md b/.changeset/four-poets-promise.md deleted file mode 100644 index 9a59b3f31..000000000 --- a/.changeset/four-poets-promise.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"dashboard": patch ---- - -fix playground tool parameters not rendering on initial load and add horizontal scroll to responses diff --git a/.changeset/friendly-geese-grab.md b/.changeset/friendly-geese-grab.md deleted file mode 100644 index 9149bae33..000000000 --- a/.changeset/friendly-geese-grab.md +++ /dev/null @@ -1,5 +0,0 @@ ---- - ---- - -feat: Powershell (Windows) installer script for the Gram CLI diff --git a/.changeset/lovely-oranges-give.md b/.changeset/lovely-oranges-give.md deleted file mode 100644 index e19222492..000000000 --- a/.changeset/lovely-oranges-give.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"dashboard": patch ---- - -Restore chat history loading in playground after v5 AI SDK upgrade diff --git a/.changeset/modern-ducks-wonder.md b/.changeset/modern-ducks-wonder.md deleted file mode 100644 index 1b5c20e3e..000000000 --- a/.changeset/modern-ducks-wonder.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"dashboard": patch ---- - -Fixed radix 
warning about Dialog.Content not having a Dialog.Title child. diff --git a/.changeset/nine-pumas-report.md b/.changeset/nine-pumas-report.md deleted file mode 100644 index 0ba209405..000000000 --- a/.changeset/nine-pumas-report.md +++ /dev/null @@ -1,6 +0,0 @@ ---- -"dashboard": patch -"server": patch ---- - -Allow instances.get to return mcp server representations of a toolset. Remove unneeded environment for instances get diff --git a/.changeset/petite-towns-allow.md b/.changeset/petite-towns-allow.md deleted file mode 100644 index 5f2f77979..000000000 --- a/.changeset/petite-towns-allow.md +++ /dev/null @@ -1,14 +0,0 @@ ---- -"function-runners": minor -"server": minor ---- - -Introducing support for large Gram Functions. - -Previously, Gram Functions could only be around 700KiB zipped which was adequate for many use cases but was severely limiting for many others. One example is ChatGPT Apps which can be full fledged React applications with images, CSS and JS assets embedded alongside an MCP server and all running in a Gram Function. Many such apps may not fit into this constrained size. Large Gram Functions addresses this limitation by allowing larger zip files to be deployed with the help of Tigris, an S3-compatible object store that integrates nicely with Fly.io - where we deploy/run Gram Functions. - -During the deployment phase on Gram, we detect if a Gram Function's assets exceed the size limitation and, instead of attaching them in the fly.io machine config directly, we upload them to Tigris and mount a lazy reference to them into machines. - -When a machine boots up to serve a tool call (or resource read), it runs a bootstrap process and detects the lazy file representing the code asset. It then makes a call to the Gram API to get a pre-signed URL to the asset from Tigris and downloads it directly from there. Once done, it continues initialization as normal and handles the tool call. 
- -There is some overhead in this process compared to directly mounting small functions into machines but for a 1.5MiB file, manual testing indicated that this is still a very snappy process overall with very acceptable overhead (<50ms). In upcoming work, we'll export measurements so users can observe this. diff --git a/.changeset/short-pillows-laugh.md b/.changeset/short-pillows-laugh.md deleted file mode 100644 index 1a126e882..000000000 --- a/.changeset/short-pillows-laugh.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"server": patch ---- - -fix: remove vercel check form cors diff --git a/.changeset/small-garlics-dress.md b/.changeset/small-garlics-dress.md deleted file mode 100644 index 0f7cadec7..000000000 --- a/.changeset/small-garlics-dress.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"server": patch ---- - -feat: reading toolset endpointa available to chat scoped auth diff --git a/.changeset/stale-lions-notice.md b/.changeset/stale-lions-notice.md deleted file mode 100644 index 11f3bc5ce..000000000 --- a/.changeset/stale-lions-notice.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -"dashboard": patch -"server": patch ---- - -- fix SSE streaming response truncation due to chunk boundary misalignment -- `addToolResult()` was called following tool execution, the AI SDK v5 wasn't automatically triggering a follow-up LLM request with the tool results. This is a known limitation with custom transports (vercel/ai#9178). 
diff --git a/.changeset/swift-candles-build.md b/.changeset/swift-candles-build.md deleted file mode 100644 index 14f1e1df6..000000000 --- a/.changeset/swift-candles-build.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"dashboard": patch ---- - -Replace Shiki with Monaco Editor for viewing large inline specs diff --git a/.changeset/tiny-forks-tap.md b/.changeset/tiny-forks-tap.md deleted file mode 100644 index f6b4bcdc6..000000000 --- a/.changeset/tiny-forks-tap.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"server": patch ---- - -Updated the CORS middleware to include the `User-Agent` header in the `Access-Control-Allow-Headers` response. This allows clients to send the `User-Agent` header in cross-origin requests which is useful for debugging and analytics purposes. diff --git a/.changeset/yellow-days-guess.md b/.changeset/yellow-days-guess.md deleted file mode 100644 index 84d8f99ee..000000000 --- a/.changeset/yellow-days-guess.md +++ /dev/null @@ -1,6 +0,0 @@ ---- -"dashboard": patch ---- - -Ensure stable QueryClient is used for lifetime of web app especially during -development mode hot reloads. diff --git a/client/dashboard/CHANGELOG.md b/client/dashboard/CHANGELOG.md index be89443f4..e4b94aae2 100644 --- a/client/dashboard/CHANGELOG.md +++ b/client/dashboard/CHANGELOG.md @@ -1,5 +1,19 @@ # dashboard +## 0.22.1 + +### Patch Changes + +- a5d6df2: fix playground tool parameters not rendering on initial load and add horizontal scroll to responses +- 013d15d: Restore chat history loading in playground after v5 AI SDK upgrade +- 2667ecf: Fixed radix warning about Dialog.Content not having a Dialog.Title child. +- 90a3b7b: Allow instances.get to return mcp server representations of a toolset. Remove unneeded environment for instances get +- c8a0376: - fix SSE streaming response truncation due to chunk boundary misalignment + - `addToolResult()` was called following tool execution, the AI SDK v5 wasn't automatically triggering a follow-up LLM request with the tool results. 
This is a known limitation with custom transports (vercel/ai#9178). +- 1a63676: Replace Shiki with Monaco Editor for viewing large inline specs +- e9988d8: Ensure stable QueryClient is used for lifetime of web app especially during + development mode hot reloads. + ## 0.22.0 ### Minor Changes diff --git a/client/dashboard/package.json b/client/dashboard/package.json index 5baefa4d7..cb2b4b4b0 100644 --- a/client/dashboard/package.json +++ b/client/dashboard/package.json @@ -1,7 +1,7 @@ { "name": "dashboard", "private": true, - "version": "0.22.0", + "version": "0.22.1", "type": "module", "scripts": { "dev": "vite", diff --git a/functions/CHANGELOG.md b/functions/CHANGELOG.md index dca835ae9..dce79af0f 100644 --- a/functions/CHANGELOG.md +++ b/functions/CHANGELOG.md @@ -1,5 +1,19 @@ # function-runners +## 0.3.0 + +### Minor Changes + +- 08ce250: Introducing support for large Gram Functions. + + Previously, Gram Functions could only be around 700KiB zipped which was adequate for many use cases but was severely limiting for many others. One example is ChatGPT Apps which can be full fledged React applications with images, CSS and JS assets embedded alongside an MCP server and all running in a Gram Function. Many such apps may not fit into this constrained size. Large Gram Functions addresses this limitation by allowing larger zip files to be deployed with the help of Tigris, an S3-compatible object store that integrates nicely with Fly.io - where we deploy/run Gram Functions. + + During the deployment phase on Gram, we detect if a Gram Function's assets exceed the size limitation and, instead of attaching them in the fly.io machine config directly, we upload them to Tigris and mount a lazy reference to them into machines. + + When a machine boots up to serve a tool call (or resource read), it runs a bootstrap process and detects the lazy file representing the code asset. 
It then makes a call to the Gram API to get a pre-signed URL to the asset from Tigris and downloads it directly from there. Once done, it continues initialization as normal and handles the tool call. + + There is some overhead in this process compared to directly mounting small functions into machines but for a 1.5MiB file, manual testing indicated that this is still a very snappy process overall with very acceptable overhead (<50ms). In upcoming work, we'll export measurements so users can observe this. + ## 0.2.3 ### Patch Changes diff --git a/functions/package.json b/functions/package.json index af30cbc24..3d6e75848 100644 --- a/functions/package.json +++ b/functions/package.json @@ -2,7 +2,7 @@ "type": "module", "name": "function-runners", "private": true, - "version": "0.2.3", + "version": "0.3.0", "description": "", "main": "index.js", "scripts": { diff --git a/server/CHANGELOG.md b/server/CHANGELOG.md index e4e61b2c8..916347ee2 100644 --- a/server/CHANGELOG.md +++ b/server/CHANGELOG.md @@ -1,5 +1,31 @@ # server +## 0.14.0 + +### Minor Changes + +- 08ce250: Introducing support for large Gram Functions. + + Previously, Gram Functions could only be around 700KiB zipped which was adequate for many use cases but was severely limiting for many others. One example is ChatGPT Apps which can be full fledged React applications with images, CSS and JS assets embedded alongside an MCP server and all running in a Gram Function. Many such apps may not fit into this constrained size. Large Gram Functions addresses this limitation by allowing larger zip files to be deployed with the help of Tigris, an S3-compatible object store that integrates nicely with Fly.io - where we deploy/run Gram Functions. + + During the deployment phase on Gram, we detect if a Gram Function's assets exceed the size limitation and, instead of attaching them in the fly.io machine config directly, we upload them to Tigris and mount a lazy reference to them into machines. 
+ + When a machine boots up to serve a tool call (or resource read), it runs a bootstrap process and detects the lazy file representing the code asset. It then makes a call to the Gram API to get a pre-signed URL to the asset from Tigris and downloads it directly from there. Once done, it continues initialization as normal and handles the tool call. + + There is some overhead in this process compared to directly mounting small functions into machines but for a 1.5MiB file, manual testing indicated that this is still a very snappy process overall with very acceptable overhead (<50ms). In upcoming work, we'll export measurements so users can observe this. + +### Patch Changes + +- 1538ac3: feat: chat scoped key access to mcp server +- 1af4e7f: fix: ensure system env compilation is case sensitive +- ea2f173: ensure function oauth is respected in install page +- 90a3b7b: Allow instances.get to return mcp server representations of a toolset. Remove unneeded environment for instances get +- a062fc7: fix: remove vercel check from cors +- 0818c9a: feat: reading toolset endpoints available to chat scoped auth +- c8a0376: - fix SSE streaming response truncation due to chunk boundary misalignment + - `addToolResult()` was called following tool execution, the AI SDK v5 wasn't automatically triggering a follow-up LLM request with the tool results. This is a known limitation with custom transports (vercel/ai#9178). +- c039dc0: Updated the CORS middleware to include the `User-Agent` header in the `Access-Control-Allow-Headers` response. This allows clients to send the `User-Agent` header in cross-origin requests which is useful for debugging and analytics purposes. + ## 0.13.0 ### Minor Changes diff --git a/server/package.json b/server/package.json index 48b1fa11f..b62ea19c9 100644 --- a/server/package.json +++ b/server/package.json @@ -1,6 +1,6 @@ { "name": "server", - "version": "0.13.0", + "version": "0.14.0", "description": "", "private": true, "main": "index.js",