From 9502227fd43ade49b64739149175cfb5a2f40068 Mon Sep 17 00:00:00 2001 From: Waleed Date: Fri, 9 Jan 2026 09:42:13 -0800 Subject: [PATCH 01/16] fix(sso): add missing deps to db container for running script (#2746) --- bun.lock | 2 ++ docker/db.Dockerfile | 3 +++ packages/db/package.json | 1 + packages/db/scripts/register-sso-provider.ts | 2 +- 4 files changed, 7 insertions(+), 1 deletion(-) diff --git a/bun.lock b/bun.lock index 47bad97dd4..c08c899810 100644 --- a/bun.lock +++ b/bun.lock @@ -1,5 +1,6 @@ { "lockfileVersion": 1, + "configVersion": 0, "workspaces": { "": { "name": "simstudio", @@ -249,6 +250,7 @@ "dependencies": { "drizzle-orm": "^0.44.5", "postgres": "^3.4.5", + "uuid": "^11.1.0", "zod": "^3.24.2", }, "devDependencies": { diff --git a/docker/db.Dockerfile b/docker/db.Dockerfile index f7a137a675..2242d0abad 100644 --- a/docker/db.Dockerfile +++ b/docker/db.Dockerfile @@ -32,6 +32,9 @@ RUN addgroup -g 1001 -S nodejs && \ # Copy only the necessary files from deps (cached if dependencies don't change) COPY --from=deps --chown=nextjs:nodejs /app/node_modules ./node_modules +# Copy root package.json for workspace resolution +COPY --chown=nextjs:nodejs package.json ./package.json + # Copy package configuration files (needed for migrations) COPY --chown=nextjs:nodejs packages/db/drizzle.config.ts ./packages/db/drizzle.config.ts diff --git a/packages/db/package.json b/packages/db/package.json index fa6da1b5b9..c1fce17eee 100644 --- a/packages/db/package.json +++ b/packages/db/package.json @@ -27,6 +27,7 @@ "dependencies": { "drizzle-orm": "^0.44.5", "postgres": "^3.4.5", + "uuid": "^11.1.0", "zod": "^3.24.2" }, "devDependencies": { diff --git a/packages/db/scripts/register-sso-provider.ts b/packages/db/scripts/register-sso-provider.ts index a978e024d0..9b29acc67d 100644 --- a/packages/db/scripts/register-sso-provider.ts +++ b/packages/db/scripts/register-sso-provider.ts @@ -7,7 +7,7 @@ * SSO provider records into the database, following the exact same logic * 
as Better Auth's registerSSOProvider endpoint. * - * Usage: bun run packages/db/register-sso-provider.ts + * Usage: bun run packages/db/scripts/register-sso-provider.ts * * Required Environment Variables: * SSO_ENABLED=true From 4da43d937c2fb4bb63f3b7e74b640ed1739814a3 Mon Sep 17 00:00:00 2001 From: Vikhyath Mondreti Date: Fri, 9 Jan 2026 11:41:04 -0800 Subject: [PATCH 02/16] improvement(docs): multiplier dropped to 1.4 (#2748) --- apps/docs/content/docs/en/execution/costs.mdx | 42 +++++++++---------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/apps/docs/content/docs/en/execution/costs.mdx b/apps/docs/content/docs/en/execution/costs.mdx index 960681b0eb..dce00ace96 100644 --- a/apps/docs/content/docs/en/execution/costs.mdx +++ b/apps/docs/content/docs/en/execution/costs.mdx @@ -48,40 +48,40 @@ The model breakdown shows: - **Hosted Models** - Sim provides API keys with a 2x pricing multiplier: + **Hosted Models** - Sim provides API keys with a 1.4x pricing multiplier for Agent blocks: **OpenAI** | Model | Base Price (Input/Output) | Hosted Price (Input/Output) | |-------|---------------------------|----------------------------| - | GPT-5.1 | $1.25 / $10.00 | $2.50 / $20.00 | - | GPT-5 | $1.25 / $10.00 | $2.50 / $20.00 | - | GPT-5 Mini | $0.25 / $2.00 | $0.50 / $4.00 | - | GPT-5 Nano | $0.05 / $0.40 | $0.10 / $0.80 | - | GPT-4o | $2.50 / $10.00 | $5.00 / $20.00 | - | GPT-4.1 | $2.00 / $8.00 | $4.00 / $16.00 | - | GPT-4.1 Mini | $0.40 / $1.60 | $0.80 / $3.20 | - | GPT-4.1 Nano | $0.10 / $0.40 | $0.20 / $0.80 | - | o1 | $15.00 / $60.00 | $30.00 / $120.00 | - | o3 | $2.00 / $8.00 | $4.00 / $16.00 | - | o4 Mini | $1.10 / $4.40 | $2.20 / $8.80 | + | GPT-5.1 | $1.25 / $10.00 | $1.75 / $14.00 | + | GPT-5 | $1.25 / $10.00 | $1.75 / $14.00 | + | GPT-5 Mini | $0.25 / $2.00 | $0.35 / $2.80 | + | GPT-5 Nano | $0.05 / $0.40 | $0.07 / $0.56 | + | GPT-4o | $2.50 / $10.00 | $3.50 / $14.00 | + | GPT-4.1 | $2.00 / $8.00 | $2.80 / $11.20 | + | GPT-4.1 Mini | $0.40 / 
$1.60 | $0.56 / $2.24 | + | GPT-4.1 Nano | $0.10 / $0.40 | $0.14 / $0.56 | + | o1 | $15.00 / $60.00 | $21.00 / $84.00 | + | o3 | $2.00 / $8.00 | $2.80 / $11.20 | + | o4 Mini | $1.10 / $4.40 | $1.54 / $6.16 | **Anthropic** | Model | Base Price (Input/Output) | Hosted Price (Input/Output) | |-------|---------------------------|----------------------------| - | Claude Opus 4.5 | $5.00 / $25.00 | $10.00 / $50.00 | - | Claude Opus 4.1 | $15.00 / $75.00 | $30.00 / $150.00 | - | Claude Sonnet 4.5 | $3.00 / $15.00 | $6.00 / $30.00 | - | Claude Sonnet 4.0 | $3.00 / $15.00 | $6.00 / $30.00 | - | Claude Haiku 4.5 | $1.00 / $5.00 | $2.00 / $10.00 | + | Claude Opus 4.5 | $5.00 / $25.00 | $7.00 / $35.00 | + | Claude Opus 4.1 | $15.00 / $75.00 | $21.00 / $105.00 | + | Claude Sonnet 4.5 | $3.00 / $15.00 | $4.20 / $21.00 | + | Claude Sonnet 4.0 | $3.00 / $15.00 | $4.20 / $21.00 | + | Claude Haiku 4.5 | $1.00 / $5.00 | $1.40 / $7.00 | **Google** | Model | Base Price (Input/Output) | Hosted Price (Input/Output) | |-------|---------------------------|----------------------------| - | Gemini 3 Pro Preview | $2.00 / $12.00 | $4.00 / $24.00 | - | Gemini 2.5 Pro | $1.25 / $10.00 | $2.50 / $20.00 | - | Gemini 2.5 Flash | $0.30 / $2.50 | $0.60 / $5.00 | + | Gemini 3 Pro Preview | $2.00 / $12.00 | $2.80 / $16.80 | + | Gemini 2.5 Pro | $1.25 / $10.00 | $1.75 / $14.00 | + | Gemini 2.5 Flash | $0.30 / $2.50 | $0.42 / $3.50 | - *The 2x multiplier covers infrastructure and API management costs.* + *The 1.4x multiplier covers infrastructure and API management costs.* From 753600ed60038bb46e850114469e8749fa1b36e2 Mon Sep 17 00:00:00 2001 From: Waleed Date: Fri, 9 Jan 2026 14:11:57 -0800 Subject: [PATCH 03/16] feat(i18n): update translations (#2749) Co-authored-by: icecrasher321 --- apps/docs/content/docs/de/execution/costs.mdx | 48 +++++++++---------- apps/docs/content/docs/es/execution/costs.mdx | 42 ++++++++-------- apps/docs/content/docs/fr/execution/costs.mdx | 42 ++++++++-------- 
apps/docs/content/docs/ja/execution/costs.mdx | 44 ++++++++--------- apps/docs/content/docs/zh/execution/costs.mdx | 44 ++++++++--------- apps/docs/i18n.lock | 10 ++-- 6 files changed, 115 insertions(+), 115 deletions(-) diff --git a/apps/docs/content/docs/de/execution/costs.mdx b/apps/docs/content/docs/de/execution/costs.mdx index 1f5da14764..743d43d93a 100644 --- a/apps/docs/content/docs/de/execution/costs.mdx +++ b/apps/docs/content/docs/de/execution/costs.mdx @@ -49,40 +49,40 @@ Die Modellaufschlüsselung zeigt: - **Gehostete Modelle** - Sim stellt API-Schlüssel mit einem 2-fachen Preismultiplikator bereit: + **Hosted Models** - Sim bietet API-Schlüssel mit einem 1,4-fachen Preismultiplikator für Agent-Blöcke: **OpenAI** - | Modell | Basispreis (Eingabe/Ausgabe) | Gehosteter Preis (Eingabe/Ausgabe) | + | Modell | Basispreis (Eingabe/Ausgabe) | Hosted-Preis (Eingabe/Ausgabe) | |-------|---------------------------|----------------------------| - | GPT-5.1 | 1,25 $ / 10,00 $ | 2,50 $ / 20,00 $ | - | GPT-5 | 1,25 $ / 10,00 $ | 2,50 $ / 20,00 $ | - | GPT-5 Mini | 0,25 $ / 2,00 $ | 0,50 $ / 4,00 $ | - | GPT-5 Nano | 0,05 $ / 0,40 $ | 0,10 $ / 0,80 $ | - | GPT-4o | 2,50 $ / 10,00 $ | 5,00 $ / 20,00 $ | - | GPT-4.1 | 2,00 $ / 8,00 $ | 4,00 $ / 16,00 $ | - | GPT-4.1 Mini | 0,40 $ / 1,60 $ | 0,80 $ / 3,20 $ | - | GPT-4.1 Nano | 0,10 $ / 0,40 $ | 0,20 $ / 0,80 $ | - | o1 | 15,00 $ / 60,00 $ | 30,00 $ / 120,00 $ | - | o3 | 2,00 $ / 8,00 $ | 4,00 $ / 16,00 $ | - | o4 Mini | 1,10 $ / 4,40 $ | 2,20 $ / 8,80 $ | + | GPT-5.1 | $1.25 / $10.00 | $1.75 / $14.00 | + | GPT-5 | $1.25 / $10.00 | $1.75 / $14.00 | + | GPT-5 Mini | $0.25 / $2.00 | $0.35 / $2.80 | + | GPT-5 Nano | $0.05 / $0.40 | $0.07 / $0.56 | + | GPT-4o | $2.50 / $10.00 | $3.50 / $14.00 | + | GPT-4.1 | $2.00 / $8.00 | $2.80 / $11.20 | + | GPT-4.1 Mini | $0.40 / $1.60 | $0.56 / $2.24 | + | GPT-4.1 Nano | $0.10 / $0.40 | $0.14 / $0.56 | + | o1 | $15.00 / $60.00 | $21.00 / $84.00 | + | o3 | $2.00 / $8.00 | $2.80 / $11.20 | 
+ | o4 Mini | $1.10 / $4.40 | $1.54 / $6.16 | **Anthropic** - | Modell | Basispreis (Eingabe/Ausgabe) | Gehosteter Preis (Eingabe/Ausgabe) | + | Modell | Basispreis (Eingabe/Ausgabe) | Hosted-Preis (Eingabe/Ausgabe) | |-------|---------------------------|----------------------------| - | Claude Opus 4.5 | 5,00 $ / 25,00 $ | 10,00 $ / 50,00 $ | - | Claude Opus 4.1 | 15,00 $ / 75,00 $ | 30,00 $ / 150,00 $ | - | Claude Sonnet 4.5 | 3,00 $ / 15,00 $ | 6,00 $ / 30,00 $ | - | Claude Sonnet 4.0 | 3,00 $ / 15,00 $ | 6,00 $ / 30,00 $ | - | Claude Haiku 4.5 | 1,00 $ / 5,00 $ | 2,00 $ / 10,00 $ | + | Claude Opus 4.5 | $5.00 / $25.00 | $7.00 / $35.00 | + | Claude Opus 4.1 | $15.00 / $75.00 | $21.00 / $105.00 | + | Claude Sonnet 4.5 | $3.00 / $15.00 | $4.20 / $21.00 | + | Claude Sonnet 4.0 | $3.00 / $15.00 | $4.20 / $21.00 | + | Claude Haiku 4.5 | $1.00 / $5.00 | $1.40 / $7.00 | **Google** - | Modell | Basispreis (Eingabe/Ausgabe) | Gehosteter Preis (Eingabe/Ausgabe) | + | Modell | Basispreis (Eingabe/Ausgabe) | Hosted-Preis (Eingabe/Ausgabe) | |-------|---------------------------|----------------------------| - | Gemini 3 Pro Preview | 2,00 $ / 12,00 $ | 4,00 $ / 24,00 $ | - | Gemini 2.5 Pro | 1,25 $ / 10,00 $ | 2,50 $ / 20,00 $ | - | Gemini 2.5 Flash | 0,30 $ / 2,50 $ | 0,60 $ / 5,00 $ | + | Gemini 3 Pro Preview | $2.00 / $12.00 | $2.80 / $16.80 | + | Gemini 2.5 Pro | $1.25 / $10.00 | $1.75 / $14.00 | + | Gemini 2.5 Flash | $0.30 / $2.50 | $0.42 / $3.50 | - *Der 2x-Multiplikator deckt Infrastruktur- und API-Verwaltungskosten ab.* + *Der 1,4-fache Multiplikator deckt Infrastruktur- und API-Verwaltungskosten ab.* diff --git a/apps/docs/content/docs/es/execution/costs.mdx b/apps/docs/content/docs/es/execution/costs.mdx index 52bd7fc577..59c5d386a4 100644 --- a/apps/docs/content/docs/es/execution/costs.mdx +++ b/apps/docs/content/docs/es/execution/costs.mdx @@ -49,40 +49,40 @@ El desglose del modelo muestra: - **Modelos alojados** - Sim proporciona claves API con un multiplicador 
de precio de 2x: + **Modelos alojados** - Sim proporciona claves API con un multiplicador de precios de 1.4x para bloques de agente: **OpenAI** | Modelo | Precio base (entrada/salida) | Precio alojado (entrada/salida) | |-------|---------------------------|----------------------------| - | GPT-5.1 | $1.25 / $10.00 | $2.50 / $20.00 | - | GPT-5 | $1.25 / $10.00 | $2.50 / $20.00 | - | GPT-5 Mini | $0.25 / $2.00 | $0.50 / $4.00 | - | GPT-5 Nano | $0.05 / $0.40 | $0.10 / $0.80 | - | GPT-4o | $2.50 / $10.00 | $5.00 / $20.00 | - | GPT-4.1 | $2.00 / $8.00 | $4.00 / $16.00 | - | GPT-4.1 Mini | $0.40 / $1.60 | $0.80 / $3.20 | - | GPT-4.1 Nano | $0.10 / $0.40 | $0.20 / $0.80 | - | o1 | $15.00 / $60.00 | $30.00 / $120.00 | - | o3 | $2.00 / $8.00 | $4.00 / $16.00 | - | o4 Mini | $1.10 / $4.40 | $2.20 / $8.80 | + | GPT-5.1 | $1.25 / $10.00 | $1.75 / $14.00 | + | GPT-5 | $1.25 / $10.00 | $1.75 / $14.00 | + | GPT-5 Mini | $0.25 / $2.00 | $0.35 / $2.80 | + | GPT-5 Nano | $0.05 / $0.40 | $0.07 / $0.56 | + | GPT-4o | $2.50 / $10.00 | $3.50 / $14.00 | + | GPT-4.1 | $2.00 / $8.00 | $2.80 / $11.20 | + | GPT-4.1 Mini | $0.40 / $1.60 | $0.56 / $2.24 | + | GPT-4.1 Nano | $0.10 / $0.40 | $0.14 / $0.56 | + | o1 | $15.00 / $60.00 | $21.00 / $84.00 | + | o3 | $2.00 / $8.00 | $2.80 / $11.20 | + | o4 Mini | $1.10 / $4.40 | $1.54 / $6.16 | **Anthropic** | Modelo | Precio base (entrada/salida) | Precio alojado (entrada/salida) | |-------|---------------------------|----------------------------| - | Claude Opus 4.5 | $5.00 / $25.00 | $10.00 / $50.00 | - | Claude Opus 4.1 | $15.00 / $75.00 | $30.00 / $150.00 | - | Claude Sonnet 4.5 | $3.00 / $15.00 | $6.00 / $30.00 | - | Claude Sonnet 4.0 | $3.00 / $15.00 | $6.00 / $30.00 | - | Claude Haiku 4.5 | $1.00 / $5.00 | $2.00 / $10.00 | + | Claude Opus 4.5 | $5.00 / $25.00 | $7.00 / $35.00 | + | Claude Opus 4.1 | $15.00 / $75.00 | $21.00 / $105.00 | + | Claude Sonnet 4.5 | $3.00 / $15.00 | $4.20 / $21.00 | + | Claude Sonnet 4.0 | $3.00 / $15.00 | $4.20 / 
$21.00 | + | Claude Haiku 4.5 | $1.00 / $5.00 | $1.40 / $7.00 | **Google** | Modelo | Precio base (entrada/salida) | Precio alojado (entrada/salida) | |-------|---------------------------|----------------------------| - | Gemini 3 Pro Preview | $2.00 / $12.00 | $4.00 / $24.00 | - | Gemini 2.5 Pro | $1.25 / $10.00 | $2.50 / $20.00 | - | Gemini 2.5 Flash | $0.30 / $2.50 | $0.60 / $5.00 | + | Gemini 3 Pro Preview | $2.00 / $12.00 | $2.80 / $16.80 | + | Gemini 2.5 Pro | $1.25 / $10.00 | $1.75 / $14.00 | + | Gemini 2.5 Flash | $0.30 / $2.50 | $0.42 / $3.50 | - *El multiplicador 2x cubre los costos de infraestructura y gestión de API.* + *El multiplicador de 1.4x cubre los costos de infraestructura y gestión de API.* diff --git a/apps/docs/content/docs/fr/execution/costs.mdx b/apps/docs/content/docs/fr/execution/costs.mdx index 5b34903448..e18e7b86a2 100644 --- a/apps/docs/content/docs/fr/execution/costs.mdx +++ b/apps/docs/content/docs/fr/execution/costs.mdx @@ -49,40 +49,40 @@ La répartition des modèles montre : - **Modèles hébergés** - Sim fournit des clés API avec un multiplicateur de prix de 2x : + **Modèles hébergés** - Sim fournit des clés API avec un multiplicateur de prix de 1,4x pour les blocs Agent : **OpenAI** | Modèle | Prix de base (entrée/sortie) | Prix hébergé (entrée/sortie) | |-------|---------------------------|----------------------------| - | GPT-5.1 | 1,25 $ / 10,00 $ | 2,50 $ / 20,00 $ | - | GPT-5 | 1,25 $ / 10,00 $ | 2,50 $ / 20,00 $ | - | GPT-5 Mini | 0,25 $ / 2,00 $ | 0,50 $ / 4,00 $ | - | GPT-5 Nano | 0,05 $ / 0,40 $ | 0,10 $ / 0,80 $ | - | GPT-4o | 2,50 $ / 10,00 $ | 5,00 $ / 20,00 $ | - | GPT-4.1 | 2,00 $ / 8,00 $ | 4,00 $ / 16,00 $ | - | GPT-4.1 Mini | 0,40 $ / 1,60 $ | 0,80 $ / 3,20 $ | - | GPT-4.1 Nano | 0,10 $ / 0,40 $ | 0,20 $ / 0,80 $ | - | o1 | 15,00 $ / 60,00 $ | 30,00 $ / 120,00 $ | - | o3 | 2,00 $ / 8,00 $ | 4,00 $ / 16,00 $ | - | o4 Mini | 1,10 $ / 4,40 $ | 2,20 $ / 8,80 $ | + | GPT-5.1 | 1,25 $ / 10,00 $ | 1,75 $ / 14,00 $ | + | 
GPT-5 | 1,25 $ / 10,00 $ | 1,75 $ / 14,00 $ | + | GPT-5 Mini | 0,25 $ / 2,00 $ | 0,35 $ / 2,80 $ | + | GPT-5 Nano | 0,05 $ / 0,40 $ | 0,07 $ / 0,56 $ | + | GPT-4o | 2,50 $ / 10,00 $ | 3,50 $ / 14,00 $ | + | GPT-4.1 | 2,00 $ / 8,00 $ | 2,80 $ / 11,20 $ | + | GPT-4.1 Mini | 0,40 $ / 1,60 $ | 0,56 $ / 2,24 $ | + | GPT-4.1 Nano | 0,10 $ / 0,40 $ | 0,14 $ / 0,56 $ | + | o1 | 15,00 $ / 60,00 $ | 21,00 $ / 84,00 $ | + | o3 | 2,00 $ / 8,00 $ | 2,80 $ / 11,20 $ | + | o4 Mini | 1,10 $ / 4,40 $ | 1,54 $ / 6,16 $ | **Anthropic** | Modèle | Prix de base (entrée/sortie) | Prix hébergé (entrée/sortie) | |-------|---------------------------|----------------------------| - | Claude Opus 4.5 | 5,00 $ / 25,00 $ | 10,00 $ / 50,00 $ | - | Claude Opus 4.1 | 15,00 $ / 75,00 $ | 30,00 $ / 150,00 $ | - | Claude Sonnet 4.5 | 3,00 $ / 15,00 $ | 6,00 $ / 30,00 $ | - | Claude Sonnet 4.0 | 3,00 $ / 15,00 $ | 6,00 $ / 30,00 $ | - | Claude Haiku 4.5 | 1,00 $ / 5,00 $ | 2,00 $ / 10,00 $ | + | Claude Opus 4.5 | 5,00 $ / 25,00 $ | 7,00 $ / 35,00 $ | + | Claude Opus 4.1 | 15,00 $ / 75,00 $ | 21,00 $ / 105,00 $ | + | Claude Sonnet 4.5 | 3,00 $ / 15,00 $ | 4,20 $ / 21,00 $ | + | Claude Sonnet 4.0 | 3,00 $ / 15,00 $ | 4,20 $ / 21,00 $ | + | Claude Haiku 4.5 | 1,00 $ / 5,00 $ | 1,40 $ / 7,00 $ | **Google** | Modèle | Prix de base (entrée/sortie) | Prix hébergé (entrée/sortie) | |-------|---------------------------|----------------------------| - | Gemini 3 Pro Preview | 2,00 $ / 12,00 $ | 4,00 $ / 24,00 $ | - | Gemini 2.5 Pro | 1,25 $ / 10,00 $ | 2,50 $ / 20,00 $ | - | Gemini 2.5 Flash | 0,30 $ / 2,50 $ | 0,60 $ / 5,00 $ | + | Gemini 3 Pro Preview | 2,00 $ / 12,00 $ | 2,80 $ / 16,80 $ | + | Gemini 2.5 Pro | 1,25 $ / 10,00 $ | 1,75 $ / 14,00 $ | + | Gemini 2.5 Flash | 0,30 $ / 2,50 $ | 0,42 $ / 3,50 $ | - *Le multiplicateur 2x couvre les coûts d'infrastructure et de gestion des API.* + *Le multiplicateur de 1,4x couvre les coûts d'infrastructure et de gestion des API.* diff --git 
a/apps/docs/content/docs/ja/execution/costs.mdx b/apps/docs/content/docs/ja/execution/costs.mdx index efbbedaaf4..5c4f1def1e 100644 --- a/apps/docs/content/docs/ja/execution/costs.mdx +++ b/apps/docs/content/docs/ja/execution/costs.mdx @@ -47,42 +47,42 @@ AIブロックを使用するワークフローでは、ログで詳細なコス ## 料金オプション - + - **ホステッドモデル** - Simは2倍の価格乗数でAPIキーを提供します: + **ホステッドモデル** - Simは、エージェントブロック用に1.4倍の価格乗数を適用したAPIキーを提供します: **OpenAI** | モデル | 基本価格(入力/出力) | ホステッド価格(入力/出力) | |-------|---------------------------|----------------------------| - | GPT-5.1 | $1.25 / $10.00 | $2.50 / $20.00 | - | GPT-5 | $1.25 / $10.00 | $2.50 / $20.00 | - | GPT-5 Mini | $0.25 / $2.00 | $0.50 / $4.00 | - | GPT-5 Nano | $0.05 / $0.40 | $0.10 / $0.80 | - | GPT-4o | $2.50 / $10.00 | $5.00 / $20.00 | - | GPT-4.1 | $2.00 / $8.00 | $4.00 / $16.00 | - | GPT-4.1 Mini | $0.40 / $1.60 | $0.80 / $3.20 | - | GPT-4.1 Nano | $0.10 / $0.40 | $0.20 / $0.80 | - | o1 | $15.00 / $60.00 | $30.00 / $120.00 | - | o3 | $2.00 / $8.00 | $4.00 / $16.00 | - | o4 Mini | $1.10 / $4.40 | $2.20 / $8.80 | + | GPT-5.1 | $1.25 / $10.00 | $1.75 / $14.00 | + | GPT-5 | $1.25 / $10.00 | $1.75 / $14.00 | + | GPT-5 Mini | $0.25 / $2.00 | $0.35 / $2.80 | + | GPT-5 Nano | $0.05 / $0.40 | $0.07 / $0.56 | + | GPT-4o | $2.50 / $10.00 | $3.50 / $14.00 | + | GPT-4.1 | $2.00 / $8.00 | $2.80 / $11.20 | + | GPT-4.1 Mini | $0.40 / $1.60 | $0.56 / $2.24 | + | GPT-4.1 Nano | $0.10 / $0.40 | $0.14 / $0.56 | + | o1 | $15.00 / $60.00 | $21.00 / $84.00 | + | o3 | $2.00 / $8.00 | $2.80 / $11.20 | + | o4 Mini | $1.10 / $4.40 | $1.54 / $6.16 | **Anthropic** | モデル | 基本価格(入力/出力) | ホステッド価格(入力/出力) | |-------|---------------------------|----------------------------| - | Claude Opus 4.5 | $5.00 / $25.00 | $10.00 / $50.00 | - | Claude Opus 4.1 | $15.00 / $75.00 | $30.00 / $150.00 | - | Claude Sonnet 4.5 | $3.00 / $15.00 | $6.00 / $30.00 | - | Claude Sonnet 4.0 | $3.00 / $15.00 | $6.00 / $30.00 | - | Claude Haiku 4.5 | $1.00 / $5.00 | $2.00 / $10.00 | + | Claude Opus 4.5 | $5.00 
/ $25.00 | $7.00 / $35.00 | + | Claude Opus 4.1 | $15.00 / $75.00 | $21.00 / $105.00 | + | Claude Sonnet 4.5 | $3.00 / $15.00 | $4.20 / $21.00 | + | Claude Sonnet 4.0 | $3.00 / $15.00 | $4.20 / $21.00 | + | Claude Haiku 4.5 | $1.00 / $5.00 | $1.40 / $7.00 | **Google** | モデル | 基本価格(入力/出力) | ホステッド価格(入力/出力) | |-------|---------------------------|----------------------------| - | Gemini 3 Pro Preview | $2.00 / $12.00 | $4.00 / $24.00 | - | Gemini 2.5 Pro | $1.25 / $10.00 | $2.50 / $20.00 | - | Gemini 2.5 Flash | $0.30 / $2.50 | $0.60 / $5.00 | + | Gemini 3 Pro Preview | $2.00 / $12.00 | $2.80 / $16.80 | + | Gemini 2.5 Pro | $1.25 / $10.00 | $1.75 / $14.00 | + | Gemini 2.5 Flash | $0.30 / $2.50 | $0.42 / $3.50 | - *2倍の乗数は、インフラストラクチャとAPI管理コストをカバーします。* + *1.4倍の乗数は、インフラストラクチャとAPI管理のコストをカバーします。* diff --git a/apps/docs/content/docs/zh/execution/costs.mdx b/apps/docs/content/docs/zh/execution/costs.mdx index 27348044ec..349039c19e 100644 --- a/apps/docs/content/docs/zh/execution/costs.mdx +++ b/apps/docs/content/docs/zh/execution/costs.mdx @@ -47,42 +47,42 @@ totalCost = baseExecutionCharge + modelCost ## 定价选项 - + - **托管模型** - Sim 提供 API 密钥,价格为基础价格的 2 倍: + **托管模型** - Sim 为 Agent 模块提供 API Key,价格乘以 1.4 倍: **OpenAI** | 模型 | 基础价格(输入/输出) | 托管价格(输入/输出) | |-------|---------------------------|----------------------------| - | GPT-5.1 | $1.25 / $10.00 | $2.50 / $20.00 | - | GPT-5 | $1.25 / $10.00 | $2.50 / $20.00 | - | GPT-5 Mini | $0.25 / $2.00 | $0.50 / $4.00 | - | GPT-5 Nano | $0.05 / $0.40 | $0.10 / $0.80 | - | GPT-4o | $2.50 / $10.00 | $5.00 / $20.00 | - | GPT-4.1 | $2.00 / $8.00 | $4.00 / $16.00 | - | GPT-4.1 Mini | $0.40 / $1.60 | $0.80 / $3.20 | - | GPT-4.1 Nano | $0.10 / $0.40 | $0.20 / $0.80 | - | o1 | $15.00 / $60.00 | $30.00 / $120.00 | - | o3 | $2.00 / $8.00 | $4.00 / $16.00 | - | o4 Mini | $1.10 / $4.40 | $2.20 / $8.80 | + | GPT-5.1 | $1.25 / $10.00 | $1.75 / $14.00 | + | GPT-5 | $1.25 / $10.00 | $1.75 / $14.00 | + | GPT-5 Mini | $0.25 / $2.00 | $0.35 / $2.80 | + | GPT-5 
Nano | $0.05 / $0.40 | $0.07 / $0.56 | + | GPT-4o | $2.50 / $10.00 | $3.50 / $14.00 | + | GPT-4.1 | $2.00 / $8.00 | $2.80 / $11.20 | + | GPT-4.1 Mini | $0.40 / $1.60 | $0.56 / $2.24 | + | GPT-4.1 Nano | $0.10 / $0.40 | $0.14 / $0.56 | + | o1 | $15.00 / $60.00 | $21.00 / $84.00 | + | o3 | $2.00 / $8.00 | $2.80 / $11.20 | + | o4 Mini | $1.10 / $4.40 | $1.54 / $6.16 | **Anthropic** | 模型 | 基础价格(输入/输出) | 托管价格(输入/输出) | |-------|---------------------------|----------------------------| - | Claude Opus 4.5 | $5.00 / $25.00 | $10.00 / $50.00 | - | Claude Opus 4.1 | $15.00 / $75.00 | $30.00 / $150.00 | - | Claude Sonnet 4.5 | $3.00 / $15.00 | $6.00 / $30.00 | - | Claude Sonnet 4.0 | $3.00 / $15.00 | $6.00 / $30.00 | - | Claude Haiku 4.5 | $1.00 / $5.00 | $2.00 / $10.00 | + | Claude Opus 4.5 | $5.00 / $25.00 | $7.00 / $35.00 | + | Claude Opus 4.1 | $15.00 / $75.00 | $21.00 / $105.00 | + | Claude Sonnet 4.5 | $3.00 / $15.00 | $4.20 / $21.00 | + | Claude Sonnet 4.0 | $3.00 / $15.00 | $4.20 / $21.00 | + | Claude Haiku 4.5 | $1.00 / $5.00 | $1.40 / $7.00 | **Google** | 模型 | 基础价格(输入/输出) | 托管价格(输入/输出) | |-------|---------------------------|----------------------------| - | Gemini 3 Pro Preview | $2.00 / $12.00 | $4.00 / $24.00 | - | Gemini 2.5 Pro | $1.25 / $10.00 | $2.50 / $20.00 | - | Gemini 2.5 Flash | $0.30 / $2.50 | $0.60 / $5.00 | + | Gemini 3 Pro Preview | $2.00 / $12.00 | $2.80 / $16.80 | + | Gemini 2.5 Pro | $1.25 / $10.00 | $1.75 / $14.00 | + | Gemini 2.5 Flash | $0.30 / $2.50 | $0.42 / $3.50 | - *2 倍系数涵盖了基础设施和 API 管理成本。* + *1.4 倍的系数涵盖了基础设施和 API 管理成本。* diff --git a/apps/docs/i18n.lock b/apps/docs/i18n.lock index 99e4ad41d1..dbc142b5bd 100644 --- a/apps/docs/i18n.lock +++ b/apps/docs/i18n.lock @@ -4581,11 +4581,11 @@ checksums: content/10: d19c8c67f52eb08b6a49c0969a9c8b86 content/11: 4024a36e0d9479ff3191fb9cd2b2e365 content/12: 0396a1e5d9548207f56e6b6cae85a542 - content/13: 4bfdeac5ad21c75209dcdfde85aa52b0 - content/14: 35df9a16b866dbe4bb9fc1d7aee42711 - content/15: 
135c044066cea8cc0e22f06d67754ec5 - content/16: 6882b91e30548d7d331388c26cf2e948 - content/17: 29aed7061148ae46fa6ec8bcbc857c3d + content/13: 68f90237f86be125224c56a2643904a3 + content/14: e854781f0fbf6f397a3ac682e892a993 + content/15: 2340c44af715fb8ca58f43151515aae1 + content/16: fc7ae93bff492d80f4b6f16e762e05fa + content/17: 8a46692d5df3fed9f94d59dfc3fb7e0a content/18: e0571c88ea5bcd4305a6f5772dcbed98 content/19: 83fc31418ff454a5e06b290e3708ef32 content/20: 4392b5939a6d5774fb080cad1ee1dbb8 From 05bbf34265bf4f2bd691b3d12fdaac644e82fe42 Mon Sep 17 00:00:00 2001 From: Waleed Date: Fri, 9 Jan 2026 14:48:23 -0800 Subject: [PATCH 04/16] improvement(canvas): add multi-block select, add batch handle, enabled, and edge operations (#2738) * improvement(canvas): add multi-block select, add batch handle, enabled, and edge operations * feat(i18n): update translations (#2732) Co-authored-by: icecrasher321 * don't allow flip handles for subflows * ack PR comments * more * fix missing handler * remove dead subflow-specific ops * remove unused code * fixed subflow ops * keep edges on subflow actions intact * fix subflow resizing * fix remove from subflow bulk * improvement(canvas): add multi-block select, add batch handle, enabled, and edge operations * don't allow flip handles for subflows * ack PR comments * more * fix missing handler * remove dead subflow-specific ops * remove unused code * fixed subflow ops * fix subflow resizing * keep edges on subflow actions intact * fixed copy from inside subflow * types improvement, preview fixes * fetch varible data in deploy modal * moved remove from subflow one position to the right * fix subflow issues * address greptile comment * fix test * improvement(preview): ui/ux * fix(preview): subflows * added batch add edges * removed recovery * use consolidated consts for sockets operations * more --------- Co-authored-by: icecrasher321 Co-authored-by: Vikhyath Mondreti Co-authored-by: Emir Karabeg --- 
apps/sim/app/(landing)/privacy/page.tsx | 2 +- apps/sim/app/_styles/globals.css | 34 + apps/sim/app/api/chat/[identifier]/route.ts | 2 +- apps/sim/app/api/templates/[id]/route.ts | 5 +- apps/sim/app/api/templates/[id]/use/route.ts | 10 +- apps/sim/app/api/v1/admin/types.ts | 34 +- .../workflows/[id]/variables/route.test.ts | 25 +- .../app/api/workflows/[id]/variables/route.ts | 42 +- apps/sim/app/templates/[id]/template.tsx | 1 - .../templates/components/template-card.tsx | 1 - .../execution-snapshot/components/index.ts | 1 + .../components/snapshot-context-menu.tsx | 97 ++ .../execution-snapshot/execution-snapshot.tsx | 156 ++- .../components/trace-spans/trace-spans.tsx | 172 ++- .../log-row-context-menu.tsx | 2 +- .../templates/components/template-card.tsx | 1 - .../w/[workflowId]/components/chat/chat.tsx | 13 +- .../context-menu/block-context-menu.tsx | 2 +- .../components/usage-limit-actions.tsx | 4 +- .../components/general/general.tsx | 55 +- .../components/template/template.tsx | 2 - .../credential-selector.tsx | 4 +- .../schedule-info/schedule-info.tsx | 4 +- .../components/tool-input/tool-input.tsx | 30 +- .../components/trigger-save/trigger-save.tsx | 6 +- .../toolbar/components/drag-preview.ts | 5 +- .../panel/hooks/use-usage-limits.ts | 5 +- .../components/subflows/subflow-node.tsx | 10 +- .../components/log-row-context-menu.tsx | 24 +- .../components/terminal/terminal.tsx | 166 +-- .../components/action-bar/action-bar.tsx | 46 +- .../workflow-block/hooks/use-webhook-info.ts | 8 +- .../components/workflow-block/types.ts | 2 + .../components/workflow-block/utils.ts | 1 + .../workflow-block/workflow-block.tsx | 6 +- .../workflow-edge/workflow-edge.tsx | 28 +- .../hooks/{use-float => float}/index.ts | 0 .../use-float-boundary-sync.ts | 0 .../{use-float => float}/use-float-drag.ts | 0 .../{use-float => float}/use-float-resize.ts | 0 .../w/[workflowId]/hooks/index.ts | 11 +- .../w/[workflowId]/hooks/use-block-visual.ts | 14 +- 
.../[workflowId]/hooks/use-node-utilities.ts | 79 +- .../w/[workflowId]/utils/block-ring-utils.ts | 24 +- .../utils/workflow-canvas-helpers.ts | 181 +++ .../[workspaceId]/w/[workflowId]/workflow.tsx | 896 ++++++++++---- .../components/block-details-sidebar.tsx | 1099 ++++++++++++++--- .../w/components/preview/components/block.tsx | 68 +- .../components/preview/components/subflow.tsx | 57 +- .../w/components/preview/index.ts | 2 +- .../w/components/preview/preview.tsx | 220 +++- .../settings-modal/settings-modal.tsx | 2 +- .../w/hooks/use-export-workflow.ts | 12 +- .../w/hooks/use-export-workspace.ts | 27 +- .../w/hooks/use-import-workflow.ts | 45 +- .../w/hooks/use-import-workspace.ts | 45 +- .../emails/components/email-footer.tsx | 2 +- .../executor/__test-utils__/executor-mocks.ts | 12 +- apps/sim/executor/constants.ts | 6 +- .../handlers/router/router-handler.ts | 10 +- apps/sim/hooks/queries/subscription.ts | 15 +- apps/sim/hooks/use-code-viewer.ts | 155 +++ apps/sim/hooks/use-collaborative-workflow.ts | 562 ++++++--- apps/sim/hooks/use-forwarded-ref.ts | 25 - apps/sim/hooks/use-subscription-state.ts | 217 ---- apps/sim/hooks/use-undo-redo.ts | 887 +++++++++---- apps/sim/hooks/use-webhook-management.ts | 6 +- .../sim/lib/logs/execution/logging-factory.ts | 14 +- .../logs/execution/snapshot/service.test.ts | 112 +- .../lib/logs/execution/snapshot/service.ts | 4 + apps/sim/lib/mcp/workflow-mcp-sync.ts | 3 +- .../lib/messaging/email/validation.test.ts | 9 + apps/sim/lib/workflows/autolayout/types.ts | 27 +- .../sim/lib/workflows/blocks/block-outputs.ts | 83 +- apps/sim/lib/workflows/comparison/compare.ts | 13 +- .../workflows/comparison/normalize.test.ts | 51 +- .../sim/lib/workflows/comparison/normalize.ts | 113 +- .../credentials/credential-extractor.ts | 78 +- apps/sim/lib/workflows/diff/diff-engine.ts | 12 +- .../workflows/executor/execute-workflow.ts | 13 +- .../lib/workflows/executor/execution-core.ts | 38 +- .../executor/human-in-the-loop-manager.ts | 47 +- 
.../lib/workflows/operations/import-export.ts | 9 +- apps/sim/lib/workflows/persistence/utils.ts | 70 +- .../workflows/sanitization/json-sanitizer.ts | 88 +- .../lib/workflows/sanitization/validation.ts | 76 +- apps/sim/lib/workflows/streaming/streaming.ts | 40 +- .../training/compute-edit-sequence.ts | 105 +- .../lib/workflows/triggers/trigger-utils.ts | 26 +- .../workflows/variables/variable-manager.ts | 21 +- apps/sim/scripts/export-workflow.ts | 15 +- apps/sim/serializer/index.test.ts | 2 +- apps/sim/serializer/index.ts | 2 +- apps/sim/serializer/types.ts | 6 +- apps/sim/socket/constants.ts | 96 ++ apps/sim/socket/database/operations.ts | 242 +++- apps/sim/socket/handlers/operations.ts | 191 ++- apps/sim/socket/middleware/permissions.ts | 10 + apps/sim/socket/validation/schemas.ts | 132 +- apps/sim/stores/panel/variables/types.ts | 2 +- apps/sim/stores/undo-redo/store.test.ts | 23 +- apps/sim/stores/undo-redo/store.ts | 128 +- apps/sim/stores/undo-redo/types.ts | 122 +- apps/sim/stores/undo-redo/utils.ts | 232 +--- apps/sim/stores/workflows/subblock/types.ts | 26 +- apps/sim/stores/workflows/utils.ts | 76 +- apps/sim/stores/workflows/workflow/store.ts | 68 +- apps/sim/stores/workflows/workflow/types.ts | 24 +- packages/testing/src/factories/index.ts | 12 +- .../src/factories/undo-redo.factory.ts | 173 ++- 110 files changed, 5922 insertions(+), 2397 deletions(-) create mode 100644 apps/sim/app/workspace/[workspaceId]/logs/components/log-details/components/execution-snapshot/components/index.ts create mode 100644 apps/sim/app/workspace/[workspaceId]/logs/components/log-details/components/execution-snapshot/components/snapshot-context-menu.tsx rename apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/{use-float => float}/index.ts (100%) rename apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/{use-float => float}/use-float-boundary-sync.ts (100%) rename apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/{use-float => float}/use-float-drag.ts 
(100%) rename apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/{use-float => float}/use-float-resize.ts (100%) create mode 100644 apps/sim/app/workspace/[workspaceId]/w/[workflowId]/utils/workflow-canvas-helpers.ts create mode 100644 apps/sim/hooks/use-code-viewer.ts delete mode 100644 apps/sim/hooks/use-forwarded-ref.ts delete mode 100644 apps/sim/hooks/use-subscription-state.ts create mode 100644 apps/sim/socket/constants.ts diff --git a/apps/sim/app/(landing)/privacy/page.tsx b/apps/sim/app/(landing)/privacy/page.tsx index 7f6a0ee7d6..a32e2b980c 100644 --- a/apps/sim/app/(landing)/privacy/page.tsx +++ b/apps/sim/app/(landing)/privacy/page.tsx @@ -767,7 +767,7 @@ export default function PrivacyPolicy() { privacy@sim.ai -
  • Mailing Address: Sim, 80 Langton St, San Francisco, CA 94133, USA
  • +
  • Mailing Address: Sim, 80 Langton St, San Francisco, CA 94103, USA
  • We will respond to your request within a reasonable timeframe.

    diff --git a/apps/sim/app/_styles/globals.css b/apps/sim/app/_styles/globals.css index b94f9a2e58..eaac62a570 100644 --- a/apps/sim/app/_styles/globals.css +++ b/apps/sim/app/_styles/globals.css @@ -42,6 +42,40 @@ animation: dash-animation 1.5s linear infinite !important; } +/** + * React Flow selection box styling + * Uses brand-secondary color for selection highlighting + */ +.react-flow__selection { + background: rgba(51, 180, 255, 0.08) !important; + border: 1px solid var(--brand-secondary) !important; +} + +.react-flow__nodesselection-rect, +.react-flow__nodesselection { + background: transparent !important; + border: none !important; + pointer-events: none !important; +} + +/** + * Selected node ring indicator + * Uses a pseudo-element overlay to match the original behavior (absolute inset-0 z-40) + */ +.react-flow__node.selected > div > div { + position: relative; +} + +.react-flow__node.selected > div > div::after { + content: ""; + position: absolute; + inset: 0; + z-index: 40; + border-radius: 8px; + box-shadow: 0 0 0 1.75px var(--brand-secondary); + pointer-events: none; +} + /** * Color tokens - single source of truth for all colors * Light mode: Warm theme diff --git a/apps/sim/app/api/chat/[identifier]/route.ts b/apps/sim/app/api/chat/[identifier]/route.ts index 5754d38b24..ac9a1c3206 100644 --- a/apps/sim/app/api/chat/[identifier]/route.ts +++ b/apps/sim/app/api/chat/[identifier]/route.ts @@ -253,7 +253,7 @@ export async function POST( userId: deployment.userId, workspaceId, isDeployed: workflowRecord?.isDeployed ?? false, - variables: workflowRecord?.variables || {}, + variables: (workflowRecord?.variables as Record) ?? 
undefined, } const stream = await createStreamingResponse({ diff --git a/apps/sim/app/api/templates/[id]/route.ts b/apps/sim/app/api/templates/[id]/route.ts index 5e1e4e8c94..bc38d2dd56 100644 --- a/apps/sim/app/api/templates/[id]/route.ts +++ b/apps/sim/app/api/templates/[id]/route.ts @@ -10,6 +10,7 @@ import { extractRequiredCredentials, sanitizeCredentials, } from '@/lib/workflows/credentials/credential-extractor' +import type { WorkflowState } from '@/stores/workflows/workflow/types' const logger = createLogger('TemplateByIdAPI') @@ -189,12 +190,12 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{ .where(eq(workflow.id, template.workflowId)) .limit(1) - const currentState = { + const currentState: Partial = { blocks: normalizedData.blocks, edges: normalizedData.edges, loops: normalizedData.loops, parallels: normalizedData.parallels, - variables: workflowRecord?.variables || undefined, + variables: (workflowRecord?.variables as WorkflowState['variables']) ?? 
undefined, lastSaved: Date.now(), } diff --git a/apps/sim/app/api/templates/[id]/use/route.ts b/apps/sim/app/api/templates/[id]/use/route.ts index 4ad3bda21e..59c5466871 100644 --- a/apps/sim/app/api/templates/[id]/use/route.ts +++ b/apps/sim/app/api/templates/[id]/use/route.ts @@ -7,7 +7,10 @@ import { v4 as uuidv4 } from 'uuid' import { getSession } from '@/lib/auth' import { generateRequestId } from '@/lib/core/utils/request' import { getBaseUrl } from '@/lib/core/utils/urls' -import { regenerateWorkflowStateIds } from '@/lib/workflows/persistence/utils' +import { + type RegenerateStateInput, + regenerateWorkflowStateIds, +} from '@/lib/workflows/persistence/utils' const logger = createLogger('TemplateUseAPI') @@ -104,9 +107,10 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{ // Step 2: Regenerate IDs when creating a copy (not when connecting/editing template) // When connecting to template (edit mode), keep original IDs // When using template (copy mode), regenerate all IDs to avoid conflicts + const templateState = templateData.state as RegenerateStateInput const workflowState = connectToTemplate - ? templateData.state - : regenerateWorkflowStateIds(templateData.state) + ? 
templateState + : regenerateWorkflowStateIds(templateState) // Step 3: Save the workflow state using the existing state endpoint (like imports do) // Ensure variables in state are remapped for the new workflow as well diff --git a/apps/sim/app/api/v1/admin/types.ts b/apps/sim/app/api/v1/admin/types.ts index 4c3916810c..fbc12ae7ec 100644 --- a/apps/sim/app/api/v1/admin/types.ts +++ b/apps/sim/app/api/v1/admin/types.ts @@ -243,7 +243,7 @@ export interface WorkflowExportState { color?: string exportedAt?: string } - variables?: WorkflowVariable[] + variables?: Record } export interface WorkflowExportPayload { @@ -317,36 +317,44 @@ export interface WorkspaceImportResponse { // ============================================================================= /** - * Parse workflow variables from database JSON format to array format. - * Handles both array and Record formats. + * Parse workflow variables from database JSON format to Record format. + * Handles both legacy Array and current Record formats. */ export function parseWorkflowVariables( dbVariables: DbWorkflow['variables'] -): WorkflowVariable[] | undefined { +): Record | undefined { if (!dbVariables) return undefined try { const varsObj = typeof dbVariables === 'string' ? 
JSON.parse(dbVariables) : dbVariables + // Handle legacy Array format by converting to Record if (Array.isArray(varsObj)) { - return varsObj.map((v) => ({ - id: v.id, - name: v.name, - type: v.type, - value: v.value, - })) + const result: Record = {} + for (const v of varsObj) { + result[v.id] = { + id: v.id, + name: v.name, + type: v.type, + value: v.value, + } + } + return result } + // Already Record format - normalize and return if (typeof varsObj === 'object' && varsObj !== null) { - return Object.values(varsObj).map((v: unknown) => { + const result: Record = {} + for (const [key, v] of Object.entries(varsObj)) { const variable = v as { id: string; name: string; type: VariableType; value: unknown } - return { + result[key] = { id: variable.id, name: variable.name, type: variable.type, value: variable.value, } - }) + } + return result } } catch { // pass diff --git a/apps/sim/app/api/workflows/[id]/variables/route.test.ts b/apps/sim/app/api/workflows/[id]/variables/route.test.ts index f7e105d3c9..b2485fa408 100644 --- a/apps/sim/app/api/workflows/[id]/variables/route.test.ts +++ b/apps/sim/app/api/workflows/[id]/variables/route.test.ts @@ -207,9 +207,15 @@ describe('Workflow Variables API Route', () => { update: { results: [{}] }, }) - const variables = [ - { id: 'var-1', workflowId: 'workflow-123', name: 'test', type: 'string', value: 'hello' }, - ] + const variables = { + 'var-1': { + id: 'var-1', + workflowId: 'workflow-123', + name: 'test', + type: 'string', + value: 'hello', + }, + } const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123/variables', { method: 'POST', @@ -242,9 +248,15 @@ describe('Workflow Variables API Route', () => { isWorkspaceOwner: false, }) - const variables = [ - { id: 'var-1', workflowId: 'workflow-123', name: 'test', type: 'string', value: 'hello' }, - ] + const variables = { + 'var-1': { + id: 'var-1', + workflowId: 'workflow-123', + name: 'test', + type: 'string', + value: 'hello', + }, + } const req = new 
NextRequest('http://localhost:3000/api/workflows/workflow-123/variables', { method: 'POST', @@ -277,7 +289,6 @@ describe('Workflow Variables API Route', () => { isWorkspaceOwner: false, }) - // Invalid data - missing required fields const invalidData = { variables: [{ name: 'test' }] } const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123/variables', { diff --git a/apps/sim/app/api/workflows/[id]/variables/route.ts b/apps/sim/app/api/workflows/[id]/variables/route.ts index ec7d5d486f..f107f31748 100644 --- a/apps/sim/app/api/workflows/[id]/variables/route.ts +++ b/apps/sim/app/api/workflows/[id]/variables/route.ts @@ -11,16 +11,22 @@ import type { Variable } from '@/stores/panel/variables/types' const logger = createLogger('WorkflowVariablesAPI') +const VariableSchema = z.object({ + id: z.string(), + workflowId: z.string(), + name: z.string(), + type: z.enum(['string', 'number', 'boolean', 'object', 'array', 'plain']), + value: z.union([ + z.string(), + z.number(), + z.boolean(), + z.record(z.unknown()), + z.array(z.unknown()), + ]), +}) + const VariablesSchema = z.object({ - variables: z.array( - z.object({ - id: z.string(), - workflowId: z.string(), - name: z.string(), - type: z.enum(['string', 'number', 'boolean', 'object', 'array', 'plain']), - value: z.union([z.string(), z.number(), z.boolean(), z.record(z.any()), z.array(z.any())]), - }) - ), + variables: z.record(z.string(), VariableSchema), }) export async function POST(req: NextRequest, { params }: { params: Promise<{ id: string }> }) { @@ -60,21 +66,12 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: try { const { variables } = VariablesSchema.parse(body) - // Format variables for storage - const variablesRecord: Record = {} - variables.forEach((variable) => { - variablesRecord[variable.id] = variable - }) - - // Replace variables completely with the incoming ones + // Variables are already in Record format - use directly // The frontend is the 
source of truth for what variables should exist - const updatedVariables = variablesRecord - - // Update workflow with variables await db .update(workflow) .set({ - variables: updatedVariables, + variables, updatedAt: new Date(), }) .where(eq(workflow.id, workflowId)) @@ -148,8 +145,9 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id: headers, } ) - } catch (error: any) { + } catch (error) { logger.error(`[${requestId}] Workflow variables fetch error`, error) - return NextResponse.json({ error: error.message }, { status: 500 }) + const errorMessage = error instanceof Error ? error.message : 'Unknown error' + return NextResponse.json({ error: errorMessage }, { status: 500 }) } } diff --git a/apps/sim/app/templates/[id]/template.tsx b/apps/sim/app/templates/[id]/template.tsx index fe26fd1558..ac09e5af9d 100644 --- a/apps/sim/app/templates/[id]/template.tsx +++ b/apps/sim/app/templates/[id]/template.tsx @@ -332,7 +332,6 @@ export default function TemplateDetails({ isWorkspaceContext = false }: Template return ( + onClose: () => void + onCopy: () => void + onSearch?: () => void + wrapText?: boolean + onToggleWrap?: () => void + /** When true, only shows Copy option (for subblock values) */ + copyOnly?: boolean +} + +/** + * Context menu for execution snapshot sidebar. + * Provides copy, search, and display options. + * Uses createPortal to render outside any transformed containers (like modals). 
+ */ +export function SnapshotContextMenu({ + isOpen, + position, + menuRef, + onClose, + onCopy, + onSearch, + wrapText, + onToggleWrap, + copyOnly = false, +}: SnapshotContextMenuProps) { + if (typeof document === 'undefined') return null + + return createPortal( + + + + { + onCopy() + onClose() + }} + > + Copy + + + {!copyOnly && onSearch && ( + <> + + { + onSearch() + onClose() + }} + > + Search + + + )} + + {!copyOnly && onToggleWrap && ( + <> + + + Wrap Text + + + )} + + , + document.body + ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/logs/components/log-details/components/execution-snapshot/execution-snapshot.tsx b/apps/sim/app/workspace/[workspaceId]/logs/components/log-details/components/execution-snapshot/execution-snapshot.tsx index bfda572622..49d0e316c5 100644 --- a/apps/sim/app/workspace/[workspaceId]/logs/components/log-details/components/execution-snapshot/execution-snapshot.tsx +++ b/apps/sim/app/workspace/[workspaceId]/logs/components/log-details/components/execution-snapshot/execution-snapshot.tsx @@ -1,12 +1,23 @@ 'use client' -import { useEffect, useMemo, useState } from 'react' +import { useCallback, useEffect, useMemo, useRef, useState } from 'react' import { AlertCircle, Loader2 } from 'lucide-react' -import { Modal, ModalBody, ModalContent, ModalHeader } from '@/components/emcn' +import { createPortal } from 'react-dom' +import { + Modal, + ModalBody, + ModalContent, + ModalHeader, + Popover, + PopoverAnchor, + PopoverContent, + PopoverItem, +} from '@/components/emcn' import { redactApiKeys } from '@/lib/core/security/redaction' import { cn } from '@/lib/core/utils/cn' import { BlockDetailsSidebar, + getLeftmostBlockId, WorkflowPreview, } from '@/app/workspace/[workspaceId]/w/components/preview' import { useExecutionSnapshot } from '@/hooks/queries/logs' @@ -60,6 +71,46 @@ export function ExecutionSnapshot({ }: ExecutionSnapshotProps) { const { data, isLoading, error } = useExecutionSnapshot(executionId) const [pinnedBlockId, 
setPinnedBlockId] = useState(null) + const autoSelectedForExecutionRef = useRef(null) + + const [isMenuOpen, setIsMenuOpen] = useState(false) + const [menuPosition, setMenuPosition] = useState({ x: 0, y: 0 }) + const [contextMenuBlockId, setContextMenuBlockId] = useState(null) + const menuRef = useRef(null) + + const closeMenu = useCallback(() => { + setIsMenuOpen(false) + setContextMenuBlockId(null) + }, []) + + const handleCanvasContextMenu = useCallback((e: React.MouseEvent) => { + e.preventDefault() + e.stopPropagation() + setContextMenuBlockId(null) + setMenuPosition({ x: e.clientX, y: e.clientY }) + setIsMenuOpen(true) + }, []) + + const handleNodeContextMenu = useCallback( + (blockId: string, mousePosition: { x: number; y: number }) => { + setContextMenuBlockId(blockId) + setMenuPosition(mousePosition) + setIsMenuOpen(true) + }, + [] + ) + + const handleCopyExecutionId = useCallback(() => { + navigator.clipboard.writeText(executionId) + closeMenu() + }, [executionId, closeMenu]) + + const handleOpenDetails = useCallback(() => { + if (contextMenuBlockId) { + setPinnedBlockId(contextMenuBlockId) + } + closeMenu() + }, [contextMenuBlockId, closeMenu]) const blockExecutions = useMemo(() => { if (!traceSpans || !Array.isArray(traceSpans)) return {} @@ -97,12 +148,21 @@ export function ExecutionSnapshot({ return blockExecutionMap }, [traceSpans]) - useEffect(() => { - setPinnedBlockId(null) - }, [executionId]) - const workflowState = data?.workflowState as WorkflowState | undefined + // Auto-select the leftmost block once when data loads for a new executionId + useEffect(() => { + if ( + workflowState && + !isMigratedWorkflowState(workflowState) && + autoSelectedForExecutionRef.current !== executionId + ) { + autoSelectedForExecutionRef.current = executionId + const leftmostId = getLeftmostBlockId(workflowState) + setPinnedBlockId(leftmostId) + } + }, [executionId, workflowState]) + const renderContent = () => { if (isLoading) { return ( @@ -169,22 +229,26 @@ 
export function ExecutionSnapshot({
    -
    +
    { - setPinnedBlockId((prev) => (prev === blockId ? null : blockId)) + setPinnedBlockId(blockId) }} + onNodeContextMenu={handleNodeContextMenu} + onPaneClick={() => setPinnedBlockId(null)} cursorStyle='pointer' executedBlocks={blockExecutions} + selectedBlockId={pinnedBlockId} + lightweight />
    {pinnedBlockId && workflowState.blocks[pinnedBlockId] && ( @@ -193,32 +257,74 @@ export function ExecutionSnapshot({ executionData={blockExecutions[pinnedBlockId]} allBlockExecutions={blockExecutions} workflowBlocks={workflowState.blocks} + workflowVariables={workflowState.variables} + loops={workflowState.loops} + parallels={workflowState.parallels} isExecutionMode + onClose={() => setPinnedBlockId(null)} /> )}
    ) } + const canvasContextMenu = + typeof document !== 'undefined' + ? createPortal( + + + + {contextMenuBlockId && ( + Open Details + )} + Copy Execution ID + + , + document.body + ) + : null + if (isModal) { return ( - { - if (!open) { - setPinnedBlockId(null) - onClose() - } - }} - > - - Workflow State + <> + { + if (!open) { + setPinnedBlockId(null) + onClose() + } + }} + > + + Workflow State - {renderContent()} - - + {renderContent()} + + + {canvasContextMenu} + ) } - return renderContent() + return ( + <> + {renderContent()} + {canvasContextMenu} + + ) } diff --git a/apps/sim/app/workspace/[workspaceId]/logs/components/log-details/components/trace-spans/trace-spans.tsx b/apps/sim/app/workspace/[workspaceId]/logs/components/log-details/components/trace-spans/trace-spans.tsx index 46a7444ccc..fa16d94240 100644 --- a/apps/sim/app/workspace/[workspaceId]/logs/components/log-details/components/trace-spans/trace-spans.tsx +++ b/apps/sim/app/workspace/[workspaceId]/logs/components/log-details/components/trace-spans/trace-spans.tsx @@ -1,13 +1,27 @@ 'use client' import type React from 'react' -import { memo, useCallback, useMemo, useState } from 'react' +import { memo, useCallback, useMemo, useRef, useState } from 'react' import clsx from 'clsx' -import { ChevronDown, Code } from '@/components/emcn' +import { ArrowDown, ArrowUp, X } from 'lucide-react' +import { createPortal } from 'react-dom' +import { + Button, + ChevronDown, + Code, + Input, + Popover, + PopoverAnchor, + PopoverContent, + PopoverDivider, + PopoverItem, +} from '@/components/emcn' import { WorkflowIcon } from '@/components/icons' +import { cn } from '@/lib/core/utils/cn' import { LoopTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/loop/loop-config' import { ParallelTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/parallel/parallel-config' import { getBlock, getBlockByToolName } from '@/blocks' +import { useCodeViewerFeatures } from 
'@/hooks/use-code-viewer' import type { TraceSpan } from '@/stores/logs/filters/types' interface TraceSpansProps { @@ -370,7 +384,7 @@ function SpanContent({ } /** - * Renders input/output section with collapsible content + * Renders input/output section with collapsible content, context menu, and search */ function InputOutputSection({ label, @@ -391,14 +405,63 @@ function InputOutputSection({ }) { const sectionKey = `${spanId}-${sectionType}` const isExpanded = expandedSections.has(sectionKey) + const contentRef = useRef(null) + const menuRef = useRef(null) + + // Context menu state + const [isContextMenuOpen, setIsContextMenuOpen] = useState(false) + const [contextMenuPosition, setContextMenuPosition] = useState({ x: 0, y: 0 }) + + // Code viewer features + const { + wrapText, + toggleWrapText, + isSearchActive, + searchQuery, + setSearchQuery, + matchCount, + currentMatchIndex, + activateSearch, + closeSearch, + goToNextMatch, + goToPreviousMatch, + handleMatchCountChange, + searchInputRef, + } = useCodeViewerFeatures({ contentRef }) const jsonString = useMemo(() => { if (!data) return '' return JSON.stringify(data, null, 2) }, [data]) + const handleContextMenu = useCallback((e: React.MouseEvent) => { + e.preventDefault() + e.stopPropagation() + setContextMenuPosition({ x: e.clientX, y: e.clientY }) + setIsContextMenuOpen(true) + }, []) + + const closeContextMenu = useCallback(() => { + setIsContextMenuOpen(false) + }, []) + + const handleCopy = useCallback(() => { + navigator.clipboard.writeText(jsonString) + closeContextMenu() + }, [jsonString, closeContextMenu]) + + const handleSearch = useCallback(() => { + activateSearch() + closeContextMenu() + }, [activateSearch, closeContextMenu]) + + const handleToggleWrap = useCallback(() => { + toggleWrapText() + closeContextMenu() + }, [toggleWrapText, closeContextMenu]) + return ( -
    +
    onToggle(sectionKey)} @@ -433,12 +496,101 @@ function InputOutputSection({ />
    {isExpanded && ( - + <> +
    + +
    + + {/* Search Overlay */} + {isSearchActive && ( +
    e.stopPropagation()} + > + setSearchQuery(e.target.value)} + placeholder='Search...' + className='mr-[2px] h-[23px] w-[94px] text-[12px]' + /> + 0 ? 'text-[var(--text-secondary)]' : 'text-[var(--text-tertiary)]' + )} + > + {matchCount > 0 ? `${currentMatchIndex + 1}/${matchCount}` : '0/0'} + + + + +
    + )} + + {/* Context Menu - rendered in portal to avoid transform/overflow clipping */} + {typeof document !== 'undefined' && + createPortal( + + + + Copy + + Search + + Wrap Text + + + , + document.body + )} + )}
    ) diff --git a/apps/sim/app/workspace/[workspaceId]/logs/components/log-row-context-menu/log-row-context-menu.tsx b/apps/sim/app/workspace/[workspaceId]/logs/components/log-row-context-menu/log-row-context-menu.tsx index f25a71732f..56c8cdab00 100644 --- a/apps/sim/app/workspace/[workspaceId]/logs/components/log-row-context-menu/log-row-context-menu.tsx +++ b/apps/sim/app/workspace/[workspaceId]/logs/components/log-row-context-menu/log-row-context-menu.tsx @@ -87,7 +87,7 @@ export function LogRowContextMenu({ onClose() }} > - Open Preview + Open Snapshot {/* Filter actions */} diff --git a/apps/sim/app/workspace/[workspaceId]/templates/components/template-card.tsx b/apps/sim/app/workspace/[workspaceId]/templates/components/template-card.tsx index 730c82e54d..f5c1fd0630 100644 --- a/apps/sim/app/workspace/[workspaceId]/templates/components/template-card.tsx +++ b/apps/sim/app/workspace/[workspaceId]/templates/components/template-card.tsx @@ -210,7 +210,6 @@ function TemplateCardInner({ {normalizedState && isInView ? ( ) => { if (e.key === 'Enter' && !e.shiftKey) { e.preventDefault() - handleSendMessage() + if (!isStreaming && !isExecuting) { + handleSendMessage() + } } else if (e.key === 'ArrowUp') { e.preventDefault() if (promptHistory.length > 0) { @@ -749,7 +751,7 @@ export function Chat() { } } }, - [handleSendMessage, promptHistory, historyIndex] + [handleSendMessage, promptHistory, historyIndex, isStreaming, isExecuting] ) /** @@ -1061,7 +1063,7 @@ export function Chat() { onKeyDown={handleKeyPress} placeholder={isDragOver ? 'Drop files here...' 
: 'Type a message...'} className='w-full border-0 bg-transparent pr-[56px] pl-[4px] shadow-none focus-visible:ring-0 focus-visible:ring-offset-0' - disabled={!activeWorkflowId || isExecuting} + disabled={!activeWorkflowId} /> {/* Buttons positioned absolutely on the right */} @@ -1091,7 +1093,8 @@ export function Chat() { disabled={ (!chatMessage.trim() && chatFiles.length === 0) || !activeWorkflowId || - isExecuting + isExecuting || + isStreaming } className={cn( 'h-[22px] w-[22px] rounded-full p-0 transition-colors', diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/context-menu/block-context-menu.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/context-menu/block-context-menu.tsx index 6ae1f22e09..547098f7bd 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/context-menu/block-context-menu.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/context-menu/block-context-menu.tsx @@ -118,7 +118,7 @@ export function BlockContextMenu({ {getToggleEnabledLabel()} )} - {!allNoteBlocks && ( + {!allNoteBlocks && !isSubflow && ( { diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/usage-limit-actions.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/usage-limit-actions.tsx index 0683694588..a4cc01b5fe 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/usage-limit-actions.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/usage-limit-actions.tsx @@ -4,10 +4,12 @@ import { useState } from 'react' import { Loader2 } from 'lucide-react' import { Button } from '@/components/emcn' import { canEditUsageLimit } from '@/lib/billing/subscriptions/utils' 
+import { getEnv, isTruthy } from '@/lib/core/config/env' import { isHosted } from '@/lib/core/config/feature-flags' import { useSubscriptionData, useUpdateUsageLimit } from '@/hooks/queries/subscription' import { useCopilotStore } from '@/stores/panel/copilot/store' +const isBillingEnabled = isTruthy(getEnv('NEXT_PUBLIC_BILLING_ENABLED')) const LIMIT_INCREMENTS = [0, 50, 100] as const function roundUpToNearest50(value: number): number { @@ -15,7 +17,7 @@ function roundUpToNearest50(value: number): number { } export function UsageLimitActions() { - const { data: subscriptionData } = useSubscriptionData() + const { data: subscriptionData } = useSubscriptionData({ enabled: isBillingEnabled }) const updateUsageLimitMutation = useUpdateUsageLimit() const subscription = subscriptionData?.data diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/components/deploy-modal/components/general/general.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/components/deploy-modal/components/general/general.tsx index 1b931fee56..fecc1327d4 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/components/deploy-modal/components/general/general.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/components/deploy-modal/components/general/general.tsx @@ -1,6 +1,6 @@ 'use client' -import { useCallback, useEffect, useMemo, useState } from 'react' +import { useCallback, useEffect, useMemo, useRef, useState } from 'react' import { createLogger } from '@sim/logger' import { Maximize2 } from 'lucide-react' import { @@ -17,6 +17,7 @@ import { Skeleton } from '@/components/ui' import type { WorkflowDeploymentVersionResponse } from '@/lib/workflows/persistence/utils' import { BlockDetailsSidebar, + getLeftmostBlockId, WorkflowPreview, } from '@/app/workspace/[workspaceId]/w/components/preview' import { 
useDeploymentVersionState, useRevertToVersion } from '@/hooks/queries/workflows' @@ -57,6 +58,7 @@ export function GeneralDeploy({ const [showPromoteDialog, setShowPromoteDialog] = useState(false) const [showExpandedPreview, setShowExpandedPreview] = useState(false) const [expandedSelectedBlockId, setExpandedSelectedBlockId] = useState(null) + const hasAutoSelectedRef = useRef(false) const [versionToLoad, setVersionToLoad] = useState(null) const [versionToPromote, setVersionToPromote] = useState(null) @@ -131,6 +133,19 @@ export function GeneralDeploy({ const hasDeployedData = deployedState && Object.keys(deployedState.blocks || {}).length > 0 const showLoadingSkeleton = isLoadingDeployedState && !hasDeployedData + // Auto-select the leftmost block once when expanded preview opens + useEffect(() => { + if (showExpandedPreview && workflowToShow && !hasAutoSelectedRef.current) { + hasAutoSelectedRef.current = true + const leftmostId = getLeftmostBlockId(workflowToShow) + setExpandedSelectedBlockId(leftmostId) + } + // Reset when modal closes + if (!showExpandedPreview) { + hasAutoSelectedRef.current = false + } + }, [showExpandedPreview, workflowToShow]) + if (showLoadingSkeleton) { return (
    @@ -186,7 +201,7 @@ export function GeneralDeploy({
    { if (e.ctrlKey || e.metaKey) return e.stopPropagation() @@ -194,28 +209,28 @@ export function GeneralDeploy({ > {workflowToShow ? ( <> - +
    + +
    - Expand preview + See preview ) : ( @@ -316,21 +331,23 @@ export function GeneralDeploy({
    { - setExpandedSelectedBlockId( - expandedSelectedBlockId === blockId ? null : blockId - ) + setExpandedSelectedBlockId(blockId) }} - cursorStyle='pointer' + onPaneClick={() => setExpandedSelectedBlockId(null)} + selectedBlockId={expandedSelectedBlockId} + lightweight />
    {expandedSelectedBlockId && workflowToShow.blocks?.[expandedSelectedBlockId] && ( setExpandedSelectedBlockId(null)} /> )} diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/components/deploy-modal/components/template/template.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/components/deploy-modal/components/template/template.tsx index 3bd4301250..dd7cdeee9f 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/components/deploy-modal/components/template/template.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/components/deploy-modal/components/template/template.tsx @@ -488,7 +488,6 @@ const OGCaptureContainer = forwardRef((_, ref) => { > state.getValue(blockId, 'timezone')) + const scheduleTimezone = useSubBlockStore((state) => state.getValue(blockId, 'timezone')) as + | string + | undefined const { data: schedule, isLoading } = useScheduleQuery(workflowId, blockId, { enabled: !isPreview, diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/tool-input.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/tool-input.tsx index 6823e303b4..ac23aa5df7 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/tool-input.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/tool-input.tsx @@ -902,7 +902,22 @@ export function ToolInput({ const [draggedIndex, setDraggedIndex] = useState(null) const [dragOverIndex, setDragOverIndex] = useState(null) const [usageControlPopoverIndex, setUsageControlPopoverIndex] = useState(null) - const { data: 
customTools = [] } = useCustomTools(workspaceId) + + const value = isPreview ? previewValue : storeValue + + const selectedTools: StoredTool[] = + Array.isArray(value) && + value.length > 0 && + value[0] !== null && + typeof value[0]?.type === 'string' + ? (value as StoredTool[]) + : [] + + const hasReferenceOnlyCustomTools = selectedTools.some( + (tool) => tool.type === 'custom-tool' && tool.customToolId && !tool.code + ) + const shouldFetchCustomTools = !isPreview || hasReferenceOnlyCustomTools + const { data: customTools = [] } = useCustomTools(shouldFetchCustomTools ? workspaceId : '') const { mcpTools, @@ -918,24 +933,15 @@ export function ToolInput({ const mcpDataLoading = mcpLoading || mcpServersLoading const hasRefreshedRef = useRef(false) - const value = isPreview ? previewValue : storeValue - - const selectedTools: StoredTool[] = - Array.isArray(value) && - value.length > 0 && - value[0] !== null && - typeof value[0]?.type === 'string' - ? (value as StoredTool[]) - : [] - const hasMcpTools = selectedTools.some((tool) => tool.type === 'mcp') useEffect(() => { + if (isPreview) return if (hasMcpTools && !hasRefreshedRef.current) { hasRefreshedRef.current = true forceRefreshMcpTools(workspaceId) } - }, [hasMcpTools, forceRefreshMcpTools, workspaceId]) + }, [hasMcpTools, forceRefreshMcpTools, workspaceId, isPreview]) /** * Returns issue info for an MCP tool. 
diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/trigger-save/trigger-save.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/trigger-save/trigger-save.tsx index af9d11beaf..b66463ca95 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/trigger-save/trigger-save.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/trigger-save/trigger-save.tsx @@ -43,10 +43,12 @@ export function TriggerSave({ const [showDeleteDialog, setShowDeleteDialog] = useState(false) const [isGeneratingTestUrl, setIsGeneratingTestUrl] = useState(false) - const storedTestUrl = useSubBlockStore((state) => state.getValue(blockId, 'testUrl')) + const storedTestUrl = useSubBlockStore((state) => state.getValue(blockId, 'testUrl')) as + | string + | null const storedTestUrlExpiresAt = useSubBlockStore((state) => state.getValue(blockId, 'testUrlExpiresAt') - ) + ) as string | null const isTestUrlExpired = useMemo(() => { if (!storedTestUrlExpiresAt) return true diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/toolbar/components/drag-preview.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/toolbar/components/drag-preview.ts index 1ea9bf4e86..e79663e5c0 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/toolbar/components/drag-preview.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/toolbar/components/drag-preview.ts @@ -32,7 +32,6 @@ export function createDragPreview(info: DragItemInfo): HTMLElement { z-index: 9999; ` - // Create icon container const iconContainer = document.createElement('div') iconContainer.style.cssText = ` width: 
24px; @@ -45,7 +44,6 @@ export function createDragPreview(info: DragItemInfo): HTMLElement { flex-shrink: 0; ` - // Clone the actual icon if provided if (info.iconElement) { const clonedIcon = info.iconElement.cloneNode(true) as HTMLElement clonedIcon.style.width = '16px' @@ -55,11 +53,10 @@ export function createDragPreview(info: DragItemInfo): HTMLElement { iconContainer.appendChild(clonedIcon) } - // Create text element const text = document.createElement('span') text.textContent = info.name text.style.cssText = ` - color: #FFFFFF; + color: var(--text-primary); font-size: 16px; font-weight: 500; white-space: nowrap; diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/hooks/use-usage-limits.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/hooks/use-usage-limits.ts index 2262449303..4bf8846668 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/hooks/use-usage-limits.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/hooks/use-usage-limits.ts @@ -1,5 +1,8 @@ +import { getEnv, isTruthy } from '@/lib/core/config/env' import { useSubscriptionData } from '@/hooks/queries/subscription' +const isBillingEnabled = isTruthy(getEnv('NEXT_PUBLIC_BILLING_ENABLED')) + /** * Simplified hook that uses React Query for usage limits. * Provides usage exceeded status from existing subscription data. 
@@ -12,7 +15,7 @@ export function useUsageLimits(options?: { }) { // For now, we only support user context via React Query // Organization context should use useOrganizationBilling directly - const { data: subscriptionData, isLoading } = useSubscriptionData() + const { data: subscriptionData, isLoading } = useSubscriptionData({ enabled: isBillingEnabled }) const usageExceeded = subscriptionData?.data?.usage?.isExceeded || false diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/subflow-node.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/subflow-node.tsx index e37f4dd88d..fd1aa196ec 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/subflow-node.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/subflow-node.tsx @@ -47,6 +47,8 @@ export interface SubflowNodeData { parentId?: string extent?: 'parent' isPreview?: boolean + /** Whether this subflow is selected in preview mode */ + isPreviewSelected?: boolean kind: 'loop' | 'parallel' name?: string } @@ -123,15 +125,17 @@ export const SubflowNodeComponent = memo(({ data, id }: NodeProps void onFilterByStatus: (status: 'error' | 'info') => void onFilterByRunId: (runId: string) => void + onCopyRunId: (runId: string) => void onClearFilters: () => void onClearConsole: () => void hasActiveFilters: boolean @@ -50,6 +51,7 @@ export function LogRowContextMenu({ onFilterByBlock, onFilterByStatus, onFilterByRunId, + onCopyRunId, onClearFilters, onClearConsole, hasActiveFilters, @@ -79,18 +81,18 @@ export function LogRowContextMenu({ }} /> - {/* Clear filters at top when active */} - {hasActiveFilters && ( + {/* Copy actions */} + {entry && hasRunId && ( <> { - onClearFilters() + onCopyRunId(entry.executionId!) 
onClose() }} > - Clear All Filters + Copy Run ID - {entry && } + )} @@ -129,6 +131,18 @@ export function LogRowContextMenu({ )} + {/* Clear filters */} + {hasActiveFilters && ( + { + onClearFilters() + onClose() + }} + > + Clear All Filters + + )} + {/* Destructive action */} {(entry || hasActiveFilters) && } (null) const outputContentRef = useRef(null) + const { + isSearchActive: isOutputSearchActive, + searchQuery: outputSearchQuery, + setSearchQuery: setOutputSearchQuery, + matchCount, + currentMatchIndex, + activateSearch: activateOutputSearch, + closeSearch: closeOutputSearch, + goToNextMatch, + goToPreviousMatch, + handleMatchCountChange, + searchInputRef: outputSearchInputRef, + } = useCodeViewerFeatures({ + contentRef: outputContentRef, + externalWrapText: wrapText, + onWrapTextChange: setWrapText, + }) - // Training controls state const [isTrainingEnvEnabled, setIsTrainingEnvEnabled] = useState(false) const showTrainingControls = useGeneralStore((state) => state.showTrainingControls) const { isTraining, toggleModal: toggleTrainingModal, stopTraining } = useCopilotTrainingStore() - // Playground state const [isPlaygroundEnabled, setIsPlaygroundEnabled] = useState(false) - // Terminal resize hooks const { handleMouseDown } = useTerminalResize() const { handleMouseDown: handleOutputPanelResizeMouseDown } = useOutputPanelResize() - // Terminal filters hook const { filters, sortConfig, @@ -370,12 +378,10 @@ export function Terminal() { hasActiveFilters, } = useTerminalFilters() - // Context menu state const [hasSelection, setHasSelection] = useState(false) const [contextMenuEntry, setContextMenuEntry] = useState(null) const [storedSelectionText, setStoredSelectionText] = useState('') - // Context menu hooks const { isOpen: isLogRowMenuOpen, position: logRowMenuPosition, @@ -577,44 +583,6 @@ export function Terminal() { } }, [activeWorkflowId, clearWorkflowConsole]) - const activateOutputSearch = useCallback(() => { - setIsOutputSearchActive(true) - 
setTimeout(() => { - outputSearchInputRef.current?.focus() - }, 0) - }, []) - - const closeOutputSearch = useCallback(() => { - setIsOutputSearchActive(false) - setOutputSearchQuery('') - setMatchCount(0) - setCurrentMatchIndex(0) - }, []) - - /** - * Navigates to the next match in the search results. - */ - const goToNextMatch = useCallback(() => { - if (matchCount === 0) return - setCurrentMatchIndex((prev) => (prev + 1) % matchCount) - }, [matchCount]) - - /** - * Navigates to the previous match in the search results. - */ - const goToPreviousMatch = useCallback(() => { - if (matchCount === 0) return - setCurrentMatchIndex((prev) => (prev - 1 + matchCount) % matchCount) - }, [matchCount]) - - /** - * Handles match count change from Code.Viewer. - */ - const handleMatchCountChange = useCallback((count: number) => { - setMatchCount(count) - setCurrentMatchIndex(0) - }, []) - const handleClearConsole = useCallback( (e: React.MouseEvent) => { e.stopPropagation() @@ -683,6 +651,14 @@ export function Terminal() { [toggleRunId, closeLogRowMenu] ) + const handleCopyRunId = useCallback( + (runId: string) => { + navigator.clipboard.writeText(runId) + closeLogRowMenu() + }, + [closeLogRowMenu] + ) + const handleClearConsoleFromMenu = useCallback(() => { clearCurrentWorkflowConsole() }, [clearCurrentWorkflowConsole]) @@ -885,66 +861,20 @@ export function Terminal() { }, [expandToLastHeight, selectedEntry, showInput, hasInputData, isExpanded]) /** - * Handle Escape to close search or unselect entry - */ - useEffect(() => { - const handleKeyDown = (e: KeyboardEvent) => { - if (e.key === 'Escape') { - e.preventDefault() - // First close search if active - if (isOutputSearchActive) { - closeOutputSearch() - return - } - // Then unselect entry - if (selectedEntry) { - setSelectedEntry(null) - setAutoSelectEnabled(true) - } - } - } - - window.addEventListener('keydown', handleKeyDown) - return () => window.removeEventListener('keydown', handleKeyDown) - }, [selectedEntry, 
isOutputSearchActive, closeOutputSearch]) - - /** - * Handle Enter/Shift+Enter for search navigation when search input is focused + * Handle Escape to unselect entry (search close is handled by useCodeViewerFeatures) */ useEffect(() => { const handleKeyDown = (e: KeyboardEvent) => { - if (!isOutputSearchActive) return - - const isSearchInputFocused = document.activeElement === outputSearchInputRef.current - - if (e.key === 'Enter' && isSearchInputFocused && matchCount > 0) { + if (e.key === 'Escape' && !isOutputSearchActive && selectedEntry) { e.preventDefault() - if (e.shiftKey) { - goToPreviousMatch() - } else { - goToNextMatch() - } + setSelectedEntry(null) + setAutoSelectEnabled(true) } } window.addEventListener('keydown', handleKeyDown) return () => window.removeEventListener('keydown', handleKeyDown) - }, [isOutputSearchActive, matchCount, goToNextMatch, goToPreviousMatch]) - - /** - * Scroll to current match when it changes - */ - useEffect(() => { - if (!isOutputSearchActive || matchCount === 0 || !outputContentRef.current) return - - // Find all match elements and scroll to the current one - const matchElements = outputContentRef.current.querySelectorAll('[data-search-match]') - const currentElement = matchElements[currentMatchIndex] - - if (currentElement) { - currentElement.scrollIntoView({ block: 'center' }) - } - }, [currentMatchIndex, isOutputSearchActive, matchCount]) + }, [selectedEntry, isOutputSearchActive]) /** * Adjust output panel width when sidebar or panel width changes. @@ -1414,25 +1344,16 @@ export function Terminal() {
    {/* Run ID */} - - - - {formatRunId(entry.executionId)} - - - {entry.executionId && ( - - {entry.executionId} - + + style={{ color: runIdColor?.text || '#D2D2D2' }} + > + {formatRunId(entry.executionId)} + {/* Duration */} { e.stopPropagation() @@ -1509,7 +1428,7 @@ export function Terminal() { variant='ghost' className={clsx( 'px-[8px] py-[6px] text-[12px]', - showInput && '!text-[var(--text-primary)]' + showInput ? '!text-[var(--text-primary)]' : '!text-[var(--text-tertiary)]' )} onClick={(e) => { e.stopPropagation() @@ -1839,6 +1758,7 @@ export function Terminal() { onFilterByBlock={handleFilterByBlock} onFilterByStatus={handleFilterByStatus} onFilterByRunId={handleFilterByRunId} + onCopyRunId={handleCopyRunId} onClearFilters={() => { clearFilters() closeLogRowMenu() diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/action-bar/action-bar.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/action-bar/action-bar.tsx index 3faa5498ac..e861a7e71e 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/action-bar/action-bar.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/action-bar/action-bar.tsx @@ -34,8 +34,8 @@ export const ActionBar = memo( const { collaborativeBatchAddBlocks, collaborativeBatchRemoveBlocks, - collaborativeToggleBlockEnabled, - collaborativeToggleBlockHandles, + collaborativeBatchToggleBlockEnabled, + collaborativeBatchToggleBlockHandles, } = useCollaborativeWorkflow() const { activeWorkflowId } = useWorkflowRegistry() const blocks = useWorkflowStore((state) => state.blocks) @@ -121,7 +121,7 @@ export const ActionBar = memo( onClick={(e) => { e.stopPropagation() if (!disabled) { - collaborativeToggleBlockEnabled(blockId) + collaborativeBatchToggleBlockEnabled([blockId]) } }} className='hover:!text-[var(--text-inverse)] h-[23px] w-[23px] rounded-[8px] 
bg-[var(--surface-7)] p-0 text-[var(--text-secondary)] hover:bg-[var(--brand-secondary)]' @@ -161,53 +161,53 @@ export const ActionBar = memo( )} - {!isStartBlock && parentId && (parentType === 'loop' || parentType === 'parallel') && ( + {!isNoteBlock && ( - {getTooltipMessage('Remove from Subflow')} + + {getTooltipMessage(horizontalHandles ? 'Vertical Ports' : 'Horizontal Ports')} + )} - {!isNoteBlock && ( + {!isStartBlock && parentId && (parentType === 'loop' || parentType === 'parallel') && ( - - {getTooltipMessage(horizontalHandles ? 'Vertical Ports' : 'Horizontal Ports')} - + {getTooltipMessage('Remove from Subflow')} )} diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/hooks/use-webhook-info.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/hooks/use-webhook-info.ts index 5e1a334a02..160d3b5871 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/hooks/use-webhook-info.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/hooks/use-webhook-info.ts @@ -54,9 +54,11 @@ export function useWebhookInfo(blockId: string, workflowId: string): UseWebhookI useCallback( (state) => { if (!activeWorkflowId) return undefined - return state.workflowValues[activeWorkflowId]?.[blockId]?.webhookProvider?.value as - | string - | undefined + const value = state.workflowValues[activeWorkflowId]?.[blockId]?.webhookProvider + if (typeof value === 'object' && value !== null && 'value' in value) { + return (value as { value?: unknown }).value as string | undefined + } + return value as string | undefined }, [activeWorkflowId, blockId] ) diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/types.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/types.ts index e3e3f0146f..be830b6a49 100644 --- 
a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/types.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/types.ts @@ -10,6 +10,8 @@ export interface WorkflowBlockProps { isActive?: boolean isPending?: boolean isPreview?: boolean + /** Whether this block is selected in preview mode */ + isPreviewSelected?: boolean subBlockValues?: Record blockState?: any } diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/utils.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/utils.ts index 1239ec3968..be9c855c95 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/utils.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/utils.ts @@ -32,6 +32,7 @@ export function shouldSkipBlockRender( prevProps.data.isActive === nextProps.data.isActive && prevProps.data.isPending === nextProps.data.isPending && prevProps.data.isPreview === nextProps.data.isPreview && + prevProps.data.isPreviewSelected === nextProps.data.isPreviewSelected && prevProps.data.config === nextProps.data.config && prevProps.data.subBlockValues === nextProps.data.subBlockValues && prevProps.data.blockState === nextProps.data.blockState && diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block.tsx index 42b8484dc1..efa8ddb2d9 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block.tsx @@ -624,7 +624,11 @@ export const WorkflowBlock = memo(function WorkflowBlock({ if (!activeWorkflowId) return const current = useSubBlockStore.getState().workflowValues[activeWorkflowId]?.[id] if (!current) return - const cred = 
current.credential?.value as string | undefined + const credValue = current.credential + const cred = + typeof credValue === 'object' && credValue !== null && 'value' in credValue + ? ((credValue as { value?: unknown }).value as string | undefined) + : (credValue as string | undefined) if (prevCredRef.current !== cred) { prevCredRef.current = cred const keys = Object.keys(current) diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-edge/workflow-edge.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-edge/workflow-edge.tsx index 02d31bd926..b4e7ca1f3f 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-edge/workflow-edge.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-edge/workflow-edge.tsx @@ -40,10 +40,7 @@ const WorkflowEdgeComponent = ({ }) const isSelected = data?.isSelected ?? false - const isInsideLoop = data?.isInsideLoop ?? false - const parentLoopId = data?.parentLoopId - // Combined store subscription to reduce subscription overhead const { diffAnalysis, isShowingDiff, isDiffReady } = useWorkflowDiffStore( useShallow((state) => ({ diffAnalysis: state.diffAnalysis, @@ -98,7 +95,8 @@ const WorkflowEdgeComponent = ({ } else if (edgeDiffStatus === 'new') { color = 'var(--brand-tertiary)' } else if (edgeRunStatus === 'success') { - color = 'var(--border-success)' + // Use green for preview mode, default for canvas execution + color = previewExecutionStatus ? 'var(--brand-tertiary-2)' : 'var(--border-success)' } else if (edgeRunStatus === 'error') { color = 'var(--text-error)' } @@ -120,34 +118,18 @@ const WorkflowEdgeComponent = ({ strokeDasharray: edgeDiffStatus === 'deleted' ? 
'10,5' : undefined, opacity, } - }, [style, edgeDiffStatus, isSelected, isErrorEdge, edgeRunStatus]) + }, [style, edgeDiffStatus, isSelected, isErrorEdge, edgeRunStatus, previewExecutionStatus]) return ( <> - - {/* Animate dash offset for edge movement effect */} - + {isSelected && (
    state.activeWorkflowId) @@ -40,14 +41,13 @@ export function useBlockVisual({ blockId, data, isPending = false }: UseBlockVis isDeletedBlock, } = useBlockState(blockId, currentWorkflow, data) - const isActive = isPreview ? false : blockIsActive + // In preview mode, use isPreviewSelected for selection state + const isActive = isPreview ? isPreviewSelected : blockIsActive const lastRunPath = useExecutionStore((state) => state.lastRunPath) const runPathStatus = isPreview ? undefined : lastRunPath.get(blockId) const setCurrentBlockId = usePanelEditorStore((state) => state.setCurrentBlockId) - const currentBlockId = usePanelEditorStore((state) => state.currentBlockId) - const isFocused = isPreview ? false : currentBlockId === blockId const handleClick = useCallback(() => { if (!isPreview) { @@ -60,12 +60,12 @@ export function useBlockVisual({ blockId, data, isPending = false }: UseBlockVis getBlockRingStyles({ isActive, isPending: isPreview ? false : isPending, - isFocused, isDeletedBlock: isPreview ? false : isDeletedBlock, diffStatus: isPreview ? 
undefined : diffStatus, runPathStatus, + isPreviewSelection: isPreview && isPreviewSelected, }), - [isActive, isPending, isFocused, isDeletedBlock, diffStatus, runPathStatus, isPreview] + [isActive, isPending, isDeletedBlock, diffStatus, runPathStatus, isPreview, isPreviewSelected] ) return { diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-node-utilities.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-node-utilities.ts index 5665789235..ffa148d881 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-node-utilities.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-node-utilities.ts @@ -3,6 +3,7 @@ import { createLogger } from '@sim/logger' import { useReactFlow } from 'reactflow' import { BLOCK_DIMENSIONS, CONTAINER_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions' import { getBlock } from '@/blocks/registry' +import { useWorkflowStore } from '@/stores/workflows/workflow/store' const logger = createLogger('NodeUtilities') @@ -208,28 +209,30 @@ export function useNodeUtilities(blocks: Record) { * to the content area bounds (after header and padding). 
* @param nodeId ID of the node being repositioned * @param newParentId ID of the new parent + * @param skipClamping If true, returns raw relative position without clamping to container bounds * @returns Relative position coordinates {x, y} within the parent */ const calculateRelativePosition = useCallback( - (nodeId: string, newParentId: string): { x: number; y: number } => { + (nodeId: string, newParentId: string, skipClamping?: boolean): { x: number; y: number } => { const nodeAbsPos = getNodeAbsolutePosition(nodeId) const parentAbsPos = getNodeAbsolutePosition(newParentId) - const parentNode = getNodes().find((n) => n.id === newParentId) - // Calculate raw relative position (relative to parent origin) const rawPosition = { x: nodeAbsPos.x - parentAbsPos.x, y: nodeAbsPos.y - parentAbsPos.y, } - // Get container and block dimensions + if (skipClamping) { + return rawPosition + } + + const parentNode = getNodes().find((n) => n.id === newParentId) const containerDimensions = { width: parentNode?.data?.width || CONTAINER_DIMENSIONS.DEFAULT_WIDTH, height: parentNode?.data?.height || CONTAINER_DIMENSIONS.DEFAULT_HEIGHT, } const blockDimensions = getBlockDimensions(nodeId) - // Clamp position to keep block inside content area return clampPositionToContainer(rawPosition, containerDimensions, blockDimensions) }, [getNodeAbsolutePosition, getNodes, getBlockDimensions] @@ -298,12 +301,12 @@ export function useNodeUtilities(blocks: Record) { */ const calculateLoopDimensions = useCallback( (nodeId: string): { width: number; height: number } => { - // Check both React Flow's node.parentId AND blocks store's data.parentId - // This ensures we catch children even if React Flow hasn't re-rendered yet - const childNodes = getNodes().filter( - (node) => node.parentId === nodeId || blocks[node.id]?.data?.parentId === nodeId + const currentBlocks = useWorkflowStore.getState().blocks + const childBlockIds = Object.keys(currentBlocks).filter( + (id) => currentBlocks[id]?.data?.parentId 
=== nodeId ) - if (childNodes.length === 0) { + + if (childBlockIds.length === 0) { return { width: CONTAINER_DIMENSIONS.DEFAULT_WIDTH, height: CONTAINER_DIMENSIONS.DEFAULT_HEIGHT, @@ -313,30 +316,28 @@ export function useNodeUtilities(blocks: Record) { let maxRight = 0 let maxBottom = 0 - childNodes.forEach((node) => { - const { width: nodeWidth, height: nodeHeight } = getBlockDimensions(node.id) - // Use block position from store if available (more up-to-date) - const block = blocks[node.id] - const position = block?.position || node.position - maxRight = Math.max(maxRight, position.x + nodeWidth) - maxBottom = Math.max(maxBottom, position.y + nodeHeight) - }) + for (const childId of childBlockIds) { + const child = currentBlocks[childId] + if (!child?.position) continue + + const { width: childWidth, height: childHeight } = getBlockDimensions(childId) + + maxRight = Math.max(maxRight, child.position.x + childWidth) + maxBottom = Math.max(maxBottom, child.position.y + childHeight) + } const width = Math.max( CONTAINER_DIMENSIONS.DEFAULT_WIDTH, - CONTAINER_DIMENSIONS.LEFT_PADDING + maxRight + CONTAINER_DIMENSIONS.RIGHT_PADDING + maxRight + CONTAINER_DIMENSIONS.RIGHT_PADDING ) const height = Math.max( CONTAINER_DIMENSIONS.DEFAULT_HEIGHT, - CONTAINER_DIMENSIONS.HEADER_HEIGHT + - CONTAINER_DIMENSIONS.TOP_PADDING + - maxBottom + - CONTAINER_DIMENSIONS.BOTTOM_PADDING + maxBottom + CONTAINER_DIMENSIONS.BOTTOM_PADDING ) return { width, height } }, - [getNodes, getBlockDimensions, blocks] + [getBlockDimensions] ) /** @@ -345,29 +346,27 @@ export function useNodeUtilities(blocks: Record) { */ const resizeLoopNodes = useCallback( (updateNodeDimensions: (id: string, dimensions: { width: number; height: number }) => void) => { - const containerNodes = getNodes() - .filter((node) => node.type && isContainerType(node.type)) - .map((node) => ({ - ...node, - depth: getNodeDepth(node.id), + const currentBlocks = useWorkflowStore.getState().blocks + const containerBlocks = 
Object.entries(currentBlocks) + .filter(([, block]) => block?.type && isContainerType(block.type)) + .map(([id, block]) => ({ + id, + block, + depth: getNodeDepth(id), })) - // Sort by depth descending - process innermost containers first - // so their dimensions are correct when outer containers calculate sizes .sort((a, b) => b.depth - a.depth) - containerNodes.forEach((node) => { - const dimensions = calculateLoopDimensions(node.id) - // Get current dimensions from the blocks store rather than React Flow's potentially stale state - const currentWidth = blocks[node.id]?.data?.width - const currentHeight = blocks[node.id]?.data?.height + for (const { id, block } of containerBlocks) { + const dimensions = calculateLoopDimensions(id) + const currentWidth = block?.data?.width + const currentHeight = block?.data?.height - // Only update if dimensions actually changed to avoid unnecessary re-renders if (dimensions.width !== currentWidth || dimensions.height !== currentHeight) { - updateNodeDimensions(node.id, dimensions) + updateNodeDimensions(id, dimensions) } - }) + } }, - [getNodes, isContainerType, getNodeDepth, calculateLoopDimensions, blocks] + [isContainerType, getNodeDepth, calculateLoopDimensions] ) /** diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/utils/block-ring-utils.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/utils/block-ring-utils.ts index 1b532c694f..1490d6040b 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/utils/block-ring-utils.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/utils/block-ring-utils.ts @@ -7,66 +7,64 @@ export type BlockRunPathStatus = 'success' | 'error' | undefined export interface BlockRingOptions { isActive: boolean isPending: boolean - isFocused: boolean isDeletedBlock: boolean diffStatus: BlockDiffStatus runPathStatus: BlockRunPathStatus + isPreviewSelection?: boolean } /** * Derives visual ring visibility and class names for workflow blocks - * based on execution, 
focus, diff, deletion, and run-path states. + * based on execution, diff, deletion, and run-path states. */ export function getBlockRingStyles(options: BlockRingOptions): { hasRing: boolean ringClassName: string } { - const { isActive, isPending, isFocused, isDeletedBlock, diffStatus, runPathStatus } = options + const { isActive, isPending, isDeletedBlock, diffStatus, runPathStatus, isPreviewSelection } = + options const hasRing = isActive || isPending || - isFocused || diffStatus === 'new' || diffStatus === 'edited' || isDeletedBlock || !!runPathStatus const ringClassName = cn( + // Preview selection: static blue ring (standard thickness, no animation) + isActive && isPreviewSelection && 'ring-[1.75px] ring-[var(--brand-secondary)]', // Executing block: pulsing success ring with prominent thickness - isActive && 'ring-[3.5px] ring-[var(--border-success)] animate-ring-pulse', + isActive && + !isPreviewSelection && + 'ring-[3.5px] ring-[var(--border-success)] animate-ring-pulse', // Non-active states use standard ring utilities !isActive && hasRing && 'ring-[1.75px]', // Pending state: warning ring !isActive && isPending && 'ring-[var(--warning)]', - // Focused (selected) state: brand ring - !isActive && !isPending && isFocused && 'ring-[var(--brand-secondary)]', - // Deleted state (highest priority after active/pending/focused) - !isActive && !isPending && !isFocused && isDeletedBlock && 'ring-[var(--text-error)]', + // Deleted state (highest priority after active/pending) + !isActive && !isPending && isDeletedBlock && 'ring-[var(--text-error)]', // Diff states !isActive && !isPending && - !isFocused && !isDeletedBlock && diffStatus === 'new' && 'ring-[var(--brand-tertiary)]', !isActive && !isPending && - !isFocused && !isDeletedBlock && diffStatus === 'edited' && 'ring-[var(--warning)]', // Run path states (lowest priority - only show if no other states active) !isActive && !isPending && - !isFocused && !isDeletedBlock && !diffStatus && runPathStatus === 'success' 
&& 'ring-[var(--border-success)]', !isActive && !isPending && - !isFocused && !isDeletedBlock && !diffStatus && runPathStatus === 'error' && diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/utils/workflow-canvas-helpers.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/utils/workflow-canvas-helpers.ts new file mode 100644 index 0000000000..a0f2a57722 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/utils/workflow-canvas-helpers.ts @@ -0,0 +1,181 @@ +import type { Edge, Node } from 'reactflow' +import { BLOCK_DIMENSIONS, CONTAINER_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions' +import { TriggerUtils } from '@/lib/workflows/triggers/triggers' +import { clampPositionToContainer } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-node-utilities' +import type { BlockState } from '@/stores/workflows/workflow/types' + +/** + * Checks if the currently focused element is an editable input. + * Returns true if the user is typing in an input, textarea, or contenteditable element. + */ +export function isInEditableElement(): boolean { + const activeElement = document.activeElement + return ( + activeElement instanceof HTMLInputElement || + activeElement instanceof HTMLTextAreaElement || + activeElement?.hasAttribute('contenteditable') === true + ) +} + +interface TriggerValidationResult { + isValid: boolean + message?: string +} + +/** + * Validates that pasting/duplicating trigger blocks won't violate constraints. + * Returns validation result with error message if invalid. + */ +export function validateTriggerPaste( + blocksToAdd: Array<{ type: string }>, + existingBlocks: Record, + action: 'paste' | 'duplicate' +): TriggerValidationResult { + for (const block of blocksToAdd) { + if (TriggerUtils.isAnyTriggerType(block.type)) { + const issue = TriggerUtils.getTriggerAdditionIssue(existingBlocks, block.type) + if (issue) { + const actionText = action === 'paste' ? 
'paste' : 'duplicate' + const message = + issue.issue === 'legacy' + ? `Cannot ${actionText} trigger blocks when a legacy Start block exists.` + : `A workflow can only have one ${issue.triggerName} trigger block. ${action === 'paste' ? 'Please remove the existing one before pasting.' : 'Cannot duplicate.'}` + return { isValid: false, message } + } + } + } + return { isValid: true } +} + +/** + * Clears drag highlight classes and resets cursor state. + * Used when drag operations end or are cancelled. + */ +export function clearDragHighlights(): void { + document.querySelectorAll('.loop-node-drag-over, .parallel-node-drag-over').forEach((el) => { + el.classList.remove('loop-node-drag-over', 'parallel-node-drag-over') + }) + document.body.style.cursor = '' +} + +/** + * Selects nodes by their IDs after paste/duplicate operations. + * Defers selection to next animation frame to allow displayNodes to sync from store first. + * This is necessary because the component uses controlled state (nodes={displayNodes}) + * and newly added blocks need time to propagate through the store → derivedNodes → displayNodes cycle. + */ +export function selectNodesDeferred( + nodeIds: string[], + setDisplayNodes: (updater: (nodes: Node[]) => Node[]) => void +): void { + const idsSet = new Set(nodeIds) + requestAnimationFrame(() => { + setDisplayNodes((nodes) => + nodes.map((node) => ({ + ...node, + selected: idsSet.has(node.id), + })) + ) + }) +} + +interface BlockData { + height?: number + data?: { + parentId?: string + width?: number + height?: number + } +} + +/** + * Calculates the final position for a node, clamping it to parent container if needed. + * Returns the clamped position suitable for persistence. 
+ */ +export function getClampedPositionForNode( + nodeId: string, + nodePosition: { x: number; y: number }, + blocks: Record, + allNodes: Node[] +): { x: number; y: number } { + const currentBlock = blocks[nodeId] + const currentParentId = currentBlock?.data?.parentId + + if (!currentParentId) { + return nodePosition + } + + const parentNode = allNodes.find((n) => n.id === currentParentId) + if (!parentNode) { + return nodePosition + } + + const containerDimensions = { + width: parentNode.data?.width || CONTAINER_DIMENSIONS.DEFAULT_WIDTH, + height: parentNode.data?.height || CONTAINER_DIMENSIONS.DEFAULT_HEIGHT, + } + const blockDimensions = { + width: BLOCK_DIMENSIONS.FIXED_WIDTH, + height: Math.max( + currentBlock?.height || BLOCK_DIMENSIONS.MIN_HEIGHT, + BLOCK_DIMENSIONS.MIN_HEIGHT + ), + } + + return clampPositionToContainer(nodePosition, containerDimensions, blockDimensions) +} + +/** + * Computes position updates for multiple nodes, clamping each to its parent container. + * Used for batch position updates after multi-node drag or selection drag. + */ +export function computeClampedPositionUpdates( + nodes: Node[], + blocks: Record, + allNodes: Node[] +): Array<{ id: string; position: { x: number; y: number } }> { + return nodes.map((node) => ({ + id: node.id, + position: getClampedPositionForNode(node.id, node.position, blocks, allNodes), + })) +} + +interface ParentUpdateEntry { + blockId: string + newParentId: string + affectedEdges: Edge[] +} + +/** + * Computes parent update entries for nodes being moved into a subflow. + * Only includes "boundary edges" - edges that cross the selection boundary + * (one end inside selection, one end outside). Edges between nodes in the + * selection are preserved. 
+ */ +export function computeParentUpdateEntries( + validNodes: Node[], + allEdges: Edge[], + targetParentId: string +): ParentUpdateEntry[] { + const movingNodeIds = new Set(validNodes.map((n) => n.id)) + + // Find edges that cross the boundary (one end inside selection, one end outside) + // Edges between nodes in the selection should stay intact + const boundaryEdges = allEdges.filter((e) => { + const sourceInSelection = movingNodeIds.has(e.source) + const targetInSelection = movingNodeIds.has(e.target) + // Only remove if exactly one end is in the selection (crosses boundary) + return sourceInSelection !== targetInSelection + }) + + // Build updates for all valid nodes + return validNodes.map((n) => { + // Only include boundary edges connected to this specific node + const edgesForThisNode = boundaryEdges.filter((e) => e.source === n.id || e.target === n.id) + return { + blockId: n.id, + newParentId: targetParentId, + affectedEdges: edgesForThisNode, + } + }) +} diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/workflow.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/workflow.tsx index b906664058..876ee009a4 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/workflow.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/workflow.tsx @@ -11,6 +11,7 @@ import ReactFlow, { type NodeChange, type NodeTypes, ReactFlowProvider, + SelectionMode, useReactFlow, } from 'reactflow' import 'reactflow/dist/style.css' @@ -42,9 +43,15 @@ import { TrainingModal } from '@/app/workspace/[workspaceId]/w/[workflowId]/comp import { WorkflowBlock } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block' import { WorkflowEdge } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-edge/workflow-edge' import { + clearDragHighlights, + computeClampedPositionUpdates, + getClampedPositionForNode, + isInEditableElement, + selectNodesDeferred, useAutoLayout, useCurrentWorkflow, 
useNodeUtilities, + validateTriggerPaste, } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks' import { useCanvasContextMenu } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-canvas-context-menu' import { @@ -180,11 +187,12 @@ const reactFlowStyles = [ const reactFlowFitViewOptions = { padding: 0.6, maxZoom: 1.0 } as const const reactFlowProOptions = { hideAttribution: true } as const -interface SelectedEdgeInfo { - id: string - parentLoopId?: string - contextId?: string -} +/** + * Map from edge contextId to edge id. + * Context IDs include parent loop info for edges inside loops. + * The actual edge ID is stored as the value for deletion operations. + */ +type SelectedEdgesMap = Map interface BlockData { id: string @@ -200,7 +208,7 @@ interface BlockData { const WorkflowContent = React.memo(() => { const [isCanvasReady, setIsCanvasReady] = useState(false) const [potentialParentId, setPotentialParentId] = useState(null) - const [selectedEdgeInfo, setSelectedEdgeInfo] = useState(null) + const [selectedEdges, setSelectedEdges] = useState(new Map()) const [isShiftPressed, setIsShiftPressed] = useState(false) const [isSelectionDragActive, setIsSelectionDragActive] = useState(false) const [isErrorConnectionDrag, setIsErrorConnectionDrag] = useState(false) @@ -280,7 +288,7 @@ const WorkflowContent = React.memo(() => { useStreamCleanup(copilotCleanup) - const { blocks, edges, isDiffMode, lastSaved } = currentWorkflow + const { blocks, edges, lastSaved } = currentWorkflow const isWorkflowReady = useMemo( () => @@ -295,6 +303,7 @@ const WorkflowContent = React.memo(() => { const { getNodeDepth, getNodeAbsolutePosition, + calculateRelativePosition, isPointInLoopNode, resizeLoopNodes, updateNodeParent: updateNodeParentUtil, @@ -343,6 +352,11 @@ const WorkflowContent = React.memo(() => { /** Stores source node/handle info when a connection drag starts for drop-on-block detection. 
*/ const connectionSourceRef = useRef<{ nodeId: string; handleId: string } | null>(null) + /** Stores start positions for multi-node drag undo/redo recording. */ + const multiNodeDragStartRef = useRef>( + new Map() + ) + /** Re-applies diff markers when blocks change after socket rehydration. */ const blocksRef = useRef(blocks) useEffect(() => { @@ -431,12 +445,14 @@ const WorkflowContent = React.memo(() => { const { collaborativeAddEdge: addEdge, collaborativeRemoveEdge: removeEdge, + collaborativeBatchRemoveEdges, collaborativeBatchUpdatePositions, collaborativeUpdateParentId: updateParentId, + collaborativeBatchUpdateParent, collaborativeBatchAddBlocks, collaborativeBatchRemoveBlocks, - collaborativeToggleBlockEnabled, - collaborativeToggleBlockHandles, + collaborativeBatchToggleBlockEnabled, + collaborativeBatchToggleBlockHandles, undo, redo, } = useCollaborativeWorkflow() @@ -636,22 +652,14 @@ const WorkflowContent = React.memo(() => { } = pasteData const pastedBlocksArray = Object.values(pastedBlocks) - for (const block of pastedBlocksArray) { - if (TriggerUtils.isAnyTriggerType(block.type)) { - const issue = TriggerUtils.getTriggerAdditionIssue(blocks, block.type) - if (issue) { - const message = - issue.issue === 'legacy' - ? 'Cannot paste trigger blocks when a legacy Start block exists.' - : `A workflow can only have one ${issue.triggerName} trigger block. 
Please remove the existing one before pasting.` - addNotification({ - level: 'error', - message, - workflowId: activeWorkflowId || undefined, - }) - return - } - } + const validation = validateTriggerPaste(pastedBlocksArray, blocks, 'paste') + if (!validation.isValid) { + addNotification({ + level: 'error', + message: validation.message!, + workflowId: activeWorkflowId || undefined, + }) + return } collaborativeBatchAddBlocks( @@ -661,6 +669,11 @@ const WorkflowContent = React.memo(() => { pastedParallels, pastedSubBlockValues ) + + selectNodesDeferred( + pastedBlocksArray.map((b) => b.id), + setDisplayNodes + ) }, [ hasClipboard, clipboard, @@ -687,22 +700,14 @@ const WorkflowContent = React.memo(() => { } = pasteData const pastedBlocksArray = Object.values(pastedBlocks) - for (const block of pastedBlocksArray) { - if (TriggerUtils.isAnyTriggerType(block.type)) { - const issue = TriggerUtils.getTriggerAdditionIssue(blocks, block.type) - if (issue) { - const message = - issue.issue === 'legacy' - ? 'Cannot duplicate trigger blocks when a legacy Start block exists.' - : `A workflow can only have one ${issue.triggerName} trigger block. 
Cannot duplicate.` - addNotification({ - level: 'error', - message, - workflowId: activeWorkflowId || undefined, - }) - return - } - } + const validation = validateTriggerPaste(pastedBlocksArray, blocks, 'duplicate') + if (!validation.isValid) { + addNotification({ + level: 'error', + message: validation.message!, + workflowId: activeWorkflowId || undefined, + }) + return } collaborativeBatchAddBlocks( @@ -712,6 +717,11 @@ const WorkflowContent = React.memo(() => { pastedParallels, pastedSubBlockValues ) + + selectNodesDeferred( + pastedBlocksArray.map((b) => b.id), + setDisplayNodes + ) }, [ contextMenuBlocks, copyBlocks, @@ -728,25 +738,26 @@ const WorkflowContent = React.memo(() => { }, [contextMenuBlocks, collaborativeBatchRemoveBlocks]) const handleContextToggleEnabled = useCallback(() => { - contextMenuBlocks.forEach((block) => { - collaborativeToggleBlockEnabled(block.id) - }) - }, [contextMenuBlocks, collaborativeToggleBlockEnabled]) + const blockIds = contextMenuBlocks.map((block) => block.id) + collaborativeBatchToggleBlockEnabled(blockIds) + }, [contextMenuBlocks, collaborativeBatchToggleBlockEnabled]) const handleContextToggleHandles = useCallback(() => { - contextMenuBlocks.forEach((block) => { - collaborativeToggleBlockHandles(block.id) - }) - }, [contextMenuBlocks, collaborativeToggleBlockHandles]) + const blockIds = contextMenuBlocks.map((block) => block.id) + collaborativeBatchToggleBlockHandles(blockIds) + }, [contextMenuBlocks, collaborativeBatchToggleBlockHandles]) const handleContextRemoveFromSubflow = useCallback(() => { - contextMenuBlocks.forEach((block) => { - if (block.parentId && (block.parentType === 'loop' || block.parentType === 'parallel')) { - window.dispatchEvent( - new CustomEvent('remove-from-subflow', { detail: { blockId: block.id } }) - ) - } - }) + const blocksToRemove = contextMenuBlocks.filter( + (block) => block.parentId && (block.parentType === 'loop' || block.parentType === 'parallel') + ) + if (blocksToRemove.length > 0) 
{ + window.dispatchEvent( + new CustomEvent('remove-from-subflow', { + detail: { blockIds: blocksToRemove.map((b) => b.id) }, + }) + ) + } }, [contextMenuBlocks]) const handleContextOpenEditor = useCallback(() => { @@ -788,13 +799,7 @@ const WorkflowContent = React.memo(() => { let cleanup: (() => void) | null = null const handleKeyDown = (event: KeyboardEvent) => { - const activeElement = document.activeElement - const isEditableElement = - activeElement instanceof HTMLInputElement || - activeElement instanceof HTMLTextAreaElement || - activeElement?.hasAttribute('contenteditable') - - if (isEditableElement) { + if (isInEditableElement()) { event.stopPropagation() return } @@ -840,22 +845,14 @@ const WorkflowContent = React.memo(() => { const pasteData = preparePasteData(pasteOffset) if (pasteData) { const pastedBlocks = Object.values(pasteData.blocks) - for (const block of pastedBlocks) { - if (TriggerUtils.isAnyTriggerType(block.type)) { - const issue = TriggerUtils.getTriggerAdditionIssue(blocks, block.type) - if (issue) { - const message = - issue.issue === 'legacy' - ? 'Cannot paste trigger blocks when a legacy Start block exists.' - : `A workflow can only have one ${issue.triggerName} trigger block. Please remove the existing one before pasting.` - addNotification({ - level: 'error', - message, - workflowId: activeWorkflowId || undefined, - }) - return - } - } + const validation = validateTriggerPaste(pastedBlocks, blocks, 'paste') + if (!validation.isValid) { + addNotification({ + level: 'error', + message: validation.message!, + workflowId: activeWorkflowId || undefined, + }) + return } collaborativeBatchAddBlocks( @@ -865,6 +862,11 @@ const WorkflowContent = React.memo(() => { pasteData.parallels, pasteData.subBlockValues ) + + selectNodesDeferred( + pastedBlocks.map((b) => b.id), + setDisplayNodes + ) } } } @@ -919,33 +921,6 @@ const WorkflowContent = React.memo(() => { [removeEdge] ) - /** Handles ActionBar remove-from-subflow events. 
*/ - useEffect(() => { - const handleRemoveFromSubflow = (event: Event) => { - const customEvent = event as CustomEvent<{ blockId: string }> - const blockId = customEvent.detail?.blockId - if (!blockId) return - - try { - const currentBlock = blocks[blockId] - const parentId = currentBlock?.data?.parentId - if (!parentId) return - - const edgesToRemove = edgesForDisplay.filter( - (e) => e.source === blockId || e.target === blockId - ) - removeEdgesForNode(blockId, edgesToRemove) - updateNodeParent(blockId, null, edgesToRemove) - } catch (err) { - logger.error('Failed to remove from subflow', { err }) - } - } - - window.addEventListener('remove-from-subflow', handleRemoveFromSubflow as EventListener) - return () => - window.removeEventListener('remove-from-subflow', handleRemoveFromSubflow as EventListener) - }, [blocks, edgesForDisplay, removeEdgesForNode, updateNodeParent]) - /** Finds the closest block to a position for auto-connect. */ const findClosestOutput = useCallback( (newNodePosition: { x: number; y: number }): BlockData | null => { @@ -1168,10 +1143,7 @@ const WorkflowContent = React.memo(() => { try { const containerInfo = isPointInLoopNode(position) - document - .querySelectorAll('.loop-node-drag-over, .parallel-node-drag-over') - .forEach((el) => el.classList.remove('loop-node-drag-over', 'parallel-node-drag-over')) - document.body.style.cursor = '' + clearDragHighlights() document.body.classList.remove('sim-drag-subflow') if (data.type === 'loop' || data.type === 'parallel') { @@ -1611,11 +1583,7 @@ const WorkflowContent = React.memo(() => { const containerInfo = isPointInLoopNode(position) // Clear any previous highlighting - document - .querySelectorAll('.loop-node-drag-over, .parallel-node-drag-over') - .forEach((el) => { - el.classList.remove('loop-node-drag-over', 'parallel-node-drag-over') - }) + clearDragHighlights() // Highlight container if hovering over it and not dragging a subflow // Subflow drag is marked by body class flag set by 
toolbar @@ -1815,7 +1783,7 @@ const WorkflowContent = React.memo(() => { const nodeArray: Node[] = [] // Add block nodes - Object.entries(blocks).forEach(([blockId, block]) => { + Object.entries(blocks).forEach(([, block]) => { if (!block || !block.type || !block.name) { return } @@ -1892,8 +1860,11 @@ const WorkflowContent = React.memo(() => { }, // Include dynamic dimensions for container resizing calculations (must match rendered size) // Both note and workflow blocks calculate dimensions deterministically via useBlockDimensions + // Use estimated dimensions for blocks without measured height to ensure selection bounds are correct width: BLOCK_DIMENSIONS.FIXED_WIDTH, - height: Math.max(block.height || BLOCK_DIMENSIONS.MIN_HEIGHT, BLOCK_DIMENSIONS.MIN_HEIGHT), + height: block.height + ? Math.max(block.height, BLOCK_DIMENSIONS.MIN_HEIGHT) + : estimateBlockDimensions(block.type).height, }) }) @@ -1945,9 +1916,77 @@ const WorkflowContent = React.memo(() => { }, [isShiftPressed]) useEffect(() => { - setDisplayNodes(derivedNodes) + // Preserve selection state when syncing from derivedNodes + setDisplayNodes((currentNodes) => { + const selectedIds = new Set(currentNodes.filter((n) => n.selected).map((n) => n.id)) + return derivedNodes.map((node) => ({ + ...node, + selected: selectedIds.has(node.id), + })) + }) }, [derivedNodes]) + /** Handles ActionBar remove-from-subflow events. 
*/ + useEffect(() => { + const handleRemoveFromSubflow = (event: Event) => { + const customEvent = event as CustomEvent<{ blockIds: string[] }> + const blockIds = customEvent.detail?.blockIds + if (!blockIds || blockIds.length === 0) return + + try { + const validBlockIds = blockIds.filter((id) => { + const block = blocks[id] + return block?.data?.parentId + }) + if (validBlockIds.length === 0) return + + const movingNodeIds = new Set(validBlockIds) + + const boundaryEdges = edgesForDisplay.filter((e) => { + const sourceInSelection = movingNodeIds.has(e.source) + const targetInSelection = movingNodeIds.has(e.target) + return sourceInSelection !== targetInSelection + }) + + // Collect absolute positions BEFORE updating parents + const absolutePositions = new Map() + for (const blockId of validBlockIds) { + absolutePositions.set(blockId, getNodeAbsolutePosition(blockId)) + } + + for (const blockId of validBlockIds) { + const edgesForThisNode = boundaryEdges.filter( + (e) => e.source === blockId || e.target === blockId + ) + removeEdgesForNode(blockId, edgesForThisNode) + updateNodeParent(blockId, null, edgesForThisNode) + } + + // Immediately update displayNodes to prevent React Flow from using stale parent data + setDisplayNodes((nodes) => + nodes.map((n) => { + const absPos = absolutePositions.get(n.id) + if (absPos) { + return { + ...n, + position: absPos, + parentId: undefined, + extent: undefined, + } + } + return n + }) + ) + } catch (err) { + logger.error('Failed to remove from subflow', { err }) + } + } + + window.addEventListener('remove-from-subflow', handleRemoveFromSubflow as EventListener) + return () => + window.removeEventListener('remove-from-subflow', handleRemoveFromSubflow as EventListener) + }, [blocks, edgesForDisplay, removeEdgesForNode, updateNodeParent, getNodeAbsolutePosition]) + /** Handles node position changes - updates local state for smooth drag, syncs to store only on drag end. 
*/ const onNodesChange = useCallback((changes: NodeChange[]) => { setDisplayNodes((nds) => applyNodeChanges(changes, nds)) @@ -2259,12 +2298,8 @@ const WorkflowContent = React.memo(() => { if (isStarterBlock) { // If it's a starter block, remove any highlighting and don't allow it to be dragged into containers if (potentialParentId) { - const prevElement = document.querySelector(`[data-id="${potentialParentId}"]`) - if (prevElement) { - prevElement.classList.remove('loop-node-drag-over', 'parallel-node-drag-over') - } + clearDragHighlights() setPotentialParentId(null) - document.body.style.cursor = '' } return // Exit early - don't process any container intersections for starter blocks } @@ -2276,12 +2311,8 @@ const WorkflowContent = React.memo(() => { if (node.type === 'subflowNode') { // Clear any highlighting for subflow nodes if (potentialParentId) { - const prevElement = document.querySelector(`[data-id="${potentialParentId}"]`) - if (prevElement) { - prevElement.classList.remove('loop-node-drag-over', 'parallel-node-drag-over') - } + clearDragHighlights() setPotentialParentId(null) - document.body.style.cursor = '' } return // Exit early - subflows cannot be placed inside other subflows } @@ -2292,9 +2323,6 @@ const WorkflowContent = React.memo(() => { // Only consider container nodes that aren't the dragged node if (n.type !== 'subflowNode' || n.id === node.id) return false - // Skip if this container is already the parent of the node being dragged - if (n.id === currentParentId) return false - // Get the container's absolute position const containerAbsolutePos = getNodeAbsolutePosition(n.id) @@ -2382,12 +2410,8 @@ const WorkflowContent = React.memo(() => { } else { // Remove highlighting if no longer over a container if (potentialParentId) { - const prevElement = document.querySelector(`[data-id="${potentialParentId}"]`) - if (prevElement) { - prevElement.classList.remove('loop-node-drag-over', 'parallel-node-drag-over') - } + clearDragHighlights() 
setPotentialParentId(null) - document.body.style.cursor = '' } } }, @@ -2407,6 +2431,8 @@ const WorkflowContent = React.memo(() => { // Store the original parent ID when starting to drag const currentParentId = blocks[node.id]?.data?.parentId || null setDragStartParentId(currentParentId) + // Initialize potentialParentId to the current parent so a click without movement doesn't remove from subflow + setPotentialParentId(currentParentId) // Store starting position for undo/redo move entry setDragStartPosition({ id: node.id, @@ -2414,49 +2440,149 @@ const WorkflowContent = React.memo(() => { y: node.position.y, parentId: currentParentId, }) + + // Capture all selected nodes' positions for multi-node undo/redo + const allNodes = getNodes() + const selectedNodes = allNodes.filter((n) => n.selected) + multiNodeDragStartRef.current.clear() + selectedNodes.forEach((n) => { + const block = blocks[n.id] + if (block) { + multiNodeDragStartRef.current.set(n.id, { + x: n.position.x, + y: n.position.y, + parentId: block.data?.parentId, + }) + } + }) }, - [blocks, setDragStartPosition] + [blocks, setDragStartPosition, getNodes, potentialParentId, setPotentialParentId] ) /** Handles node drag stop to establish parent-child relationships. 
*/ const onNodeDragStop = useCallback( (_event: React.MouseEvent, node: any) => { - // Clear UI effects - document.querySelectorAll('.loop-node-drag-over, .parallel-node-drag-over').forEach((el) => { - el.classList.remove('loop-node-drag-over', 'parallel-node-drag-over') - }) - document.body.style.cursor = '' + clearDragHighlights() - // Get the block's current parent (if any) - const currentBlock = blocks[node.id] - const currentParentId = currentBlock?.data?.parentId + // Get all selected nodes to update their positions too + const allNodes = getNodes() + const selectedNodes = allNodes.filter((n) => n.selected) - // Calculate position - clamp if inside a container - let finalPosition = node.position - if (currentParentId) { - // Block is inside a container - clamp position to keep it fully inside - const parentNode = getNodes().find((n) => n.id === currentParentId) - if (parentNode) { - const containerDimensions = { - width: parentNode.data?.width || CONTAINER_DIMENSIONS.DEFAULT_WIDTH, - height: parentNode.data?.height || CONTAINER_DIMENSIONS.DEFAULT_HEIGHT, - } - const blockDimensions = { - width: BLOCK_DIMENSIONS.FIXED_WIDTH, - height: Math.max( - currentBlock?.height || BLOCK_DIMENSIONS.MIN_HEIGHT, - BLOCK_DIMENSIONS.MIN_HEIGHT - ), - } + // If multiple nodes are selected, update all their positions + if (selectedNodes.length > 1) { + const positionUpdates = computeClampedPositionUpdates(selectedNodes, blocks, allNodes) + collaborativeBatchUpdatePositions(positionUpdates, { + previousPositions: multiNodeDragStartRef.current, + }) - finalPosition = clampPositionToContainer( - node.position, - containerDimensions, - blockDimensions - ) + // Process parent updates for nodes whose parent is changing + // Check each node individually - don't rely on dragStartParentId since + // multi-node selections can contain nodes from different parents + const selectedNodeIds = new Set(selectedNodes.map((n) => n.id)) + const nodesNeedingParentUpdate = selectedNodes.filter((n) 
=> { + const block = blocks[n.id] + if (!block) return false + const currentParent = block.data?.parentId || null + // Skip if the node's parent is also being moved (keep children with their parent) + if (currentParent && selectedNodeIds.has(currentParent)) return false + // Node needs update if current parent !== target parent + return currentParent !== potentialParentId + }) + + if (nodesNeedingParentUpdate.length > 0) { + // Filter out nodes that cannot be moved into subflows (when target is a subflow) + const validNodes = nodesNeedingParentUpdate.filter((n) => { + // These restrictions only apply when moving INTO a subflow + if (potentialParentId) { + if (n.data?.type === 'starter') return false + const block = blocks[n.id] + if (block && TriggerUtils.isTriggerBlock(block)) return false + if (n.type === 'subflowNode') return false + } + return true + }) + + if (validNodes.length > 0) { + const movingNodeIds = new Set(validNodes.map((n) => n.id)) + const boundaryEdges = edgesForDisplay.filter((e) => { + const sourceInSelection = movingNodeIds.has(e.source) + const targetInSelection = movingNodeIds.has(e.target) + return sourceInSelection !== targetInSelection + }) + + const rawUpdates = validNodes.map((n) => { + const edgesForThisNode = boundaryEdges.filter( + (e) => e.source === n.id || e.target === n.id + ) + const newPosition = potentialParentId + ? calculateRelativePosition(n.id, potentialParentId, true) + : getNodeAbsolutePosition(n.id) + return { + blockId: n.id, + newParentId: potentialParentId, + newPosition, + affectedEdges: edgesForThisNode, + } + }) + + let updates = rawUpdates + if (potentialParentId) { + const minX = Math.min(...rawUpdates.map((u) => u.newPosition.x)) + const minY = Math.min(...rawUpdates.map((u) => u.newPosition.y)) + + const targetMinX = CONTAINER_DIMENSIONS.LEFT_PADDING + const targetMinY = + CONTAINER_DIMENSIONS.HEADER_HEIGHT + CONTAINER_DIMENSIONS.TOP_PADDING + + const shiftX = minX < targetMinX ? 
targetMinX - minX : 0 + const shiftY = minY < targetMinY ? targetMinY - minY : 0 + + updates = rawUpdates.map((u) => ({ + ...u, + newPosition: { + x: u.newPosition.x + shiftX, + y: u.newPosition.y + shiftY, + }, + })) + } + + collaborativeBatchUpdateParent(updates) + + setDisplayNodes((nodes) => + nodes.map((node) => { + const update = updates.find((u) => u.blockId === node.id) + if (update) { + return { + ...node, + position: update.newPosition, + parentId: update.newParentId ?? undefined, + } + } + return node + }) + ) + + if (potentialParentId) { + resizeLoopNodesWrapper() + } + + logger.info('Batch moved nodes to new parent', { + targetParentId: potentialParentId, + nodeCount: validNodes.length, + }) + } } + + // Clear drag start state + setDragStartPosition(null) + setPotentialParentId(null) + multiNodeDragStartRef.current.clear() + return } + // Single node drag - original logic + const finalPosition = getClampedPositionForNode(node.id, node.position, blocks, allNodes) + updateBlockPosition(node.id, finalPosition) // Record single move entry on drag end to avoid micro-moves @@ -2564,10 +2690,66 @@ const WorkflowContent = React.memo(() => { const affectedEdges = [...edgesToRemove, ...edgesToAdd] updateNodeParent(node.id, potentialParentId, affectedEdges) + setDisplayNodes((nodes) => + nodes.map((n) => { + if (n.id === node.id) { + return { + ...n, + position: relativePositionBefore, + parentId: potentialParentId, + extent: 'parent' as const, + } + } + return n + }) + ) + // Now add the edges after parent update edgesToAdd.forEach((edge) => addEdge(edge)) window.dispatchEvent(new CustomEvent('skip-edge-recording', { detail: { skip: false } })) + } else if (!potentialParentId && dragStartParentId) { + // Moving OUT of a subflow to canvas + // Get absolute position BEFORE removing from parent + const absolutePosition = getNodeAbsolutePosition(node.id) + + // Remove edges connected to this node since it's leaving its parent + const edgesToRemove = 
edgesForDisplay.filter( + (e) => e.source === node.id || e.target === node.id + ) + + if (edgesToRemove.length > 0) { + removeEdgesForNode(node.id, edgesToRemove) + + logger.info('Removed edges when moving node out of subflow', { + blockId: node.id, + sourceParentId: dragStartParentId, + edgeCount: edgesToRemove.length, + }) + } + + // Clear the parent relationship + updateNodeParent(node.id, null, edgesToRemove) + + // Immediately update displayNodes to prevent React Flow from using stale parent data + setDisplayNodes((nodes) => + nodes.map((n) => { + if (n.id === node.id) { + return { + ...n, + position: absolutePosition, + parentId: undefined, + extent: undefined, + } + } + return n + }) + ) + + logger.info('Moved node out of subflow', { + blockId: node.id, + sourceParentId: dragStartParentId, + }) } // Reset state @@ -2585,10 +2767,14 @@ const WorkflowContent = React.memo(() => { edgesForDisplay, removeEdgesForNode, getNodeAbsolutePosition, + calculateRelativePosition, + resizeLoopNodesWrapper, getDragStartPosition, setDragStartPosition, addNotification, activeWorkflowId, + collaborativeBatchUpdatePositions, + collaborativeBatchUpdateParent, ] ) @@ -2603,52 +2789,306 @@ const WorkflowContent = React.memo(() => { requestAnimationFrame(() => setIsSelectionDragActive(false)) }, []) + /** Captures initial positions when selection drag starts (for marquee-selected nodes). 
*/ + const onSelectionDragStart = useCallback( + (_event: React.MouseEvent, nodes: Node[]) => { + // Capture the parent ID of the first node as reference (they should all be in the same context) + if (nodes.length > 0) { + const firstNodeParentId = blocks[nodes[0].id]?.data?.parentId || null + setDragStartParentId(firstNodeParentId) + } + + // Capture all selected nodes' positions for undo/redo + multiNodeDragStartRef.current.clear() + nodes.forEach((n) => { + const block = blocks[n.id] + if (block) { + multiNodeDragStartRef.current.set(n.id, { + x: n.position.x, + y: n.position.y, + parentId: block.data?.parentId, + }) + } + }) + }, + [blocks] + ) + + /** Handles selection drag to detect potential parent containers for batch drops. */ + const onSelectionDrag = useCallback( + (_event: React.MouseEvent, nodes: Node[]) => { + if (nodes.length === 0) return + + // Filter out nodes that can't be placed in containers + const eligibleNodes = nodes.filter((n) => { + if (n.data?.type === 'starter') return false + if (n.type === 'subflowNode') return false + const block = blocks[n.id] + if (block && TriggerUtils.isTriggerBlock(block)) return false + return true + }) + + // If no eligible nodes, clear any potential parent + if (eligibleNodes.length === 0) { + if (potentialParentId) { + clearDragHighlights() + setPotentialParentId(null) + } + return + } + + // Calculate bounding box of all dragged nodes using absolute positions + let minX = Number.POSITIVE_INFINITY + let minY = Number.POSITIVE_INFINITY + let maxX = Number.NEGATIVE_INFINITY + let maxY = Number.NEGATIVE_INFINITY + + eligibleNodes.forEach((node) => { + const absolutePos = getNodeAbsolutePosition(node.id) + const block = blocks[node.id] + const width = BLOCK_DIMENSIONS.FIXED_WIDTH + const height = Math.max( + node.height || BLOCK_DIMENSIONS.MIN_HEIGHT, + BLOCK_DIMENSIONS.MIN_HEIGHT + ) + + minX = Math.min(minX, absolutePos.x) + minY = Math.min(minY, absolutePos.y) + maxX = Math.max(maxX, absolutePos.x + width) + 
maxY = Math.max(maxY, absolutePos.y + height) + }) + + // Use bounding box for intersection detection + const selectionRect = { left: minX, right: maxX, top: minY, bottom: maxY } + + // Find containers that intersect with the selection bounding box + const allNodes = getNodes() + const intersectingContainers = allNodes + .filter((containerNode) => { + if (containerNode.type !== 'subflowNode') return false + // Skip if any dragged node is this container + if (nodes.some((n) => n.id === containerNode.id)) return false + + const containerAbsolutePos = getNodeAbsolutePosition(containerNode.id) + const containerRect = { + left: containerAbsolutePos.x, + right: + containerAbsolutePos.x + + (containerNode.data?.width || CONTAINER_DIMENSIONS.DEFAULT_WIDTH), + top: containerAbsolutePos.y, + bottom: + containerAbsolutePos.y + + (containerNode.data?.height || CONTAINER_DIMENSIONS.DEFAULT_HEIGHT), + } + + // Check intersection + return ( + selectionRect.left < containerRect.right && + selectionRect.right > containerRect.left && + selectionRect.top < containerRect.bottom && + selectionRect.bottom > containerRect.top + ) + }) + .map((n) => ({ + container: n, + depth: getNodeDepth(n.id), + size: + (n.data?.width || CONTAINER_DIMENSIONS.DEFAULT_WIDTH) * + (n.data?.height || CONTAINER_DIMENSIONS.DEFAULT_HEIGHT), + })) + + if (intersectingContainers.length > 0) { + // Sort by depth first (deepest first), then by size + const sortedContainers = intersectingContainers.sort((a, b) => { + if (a.depth !== b.depth) return b.depth - a.depth + return a.size - b.size + }) + + const bestMatch = sortedContainers[0] + + if (bestMatch.container.id !== potentialParentId) { + clearDragHighlights() + setPotentialParentId(bestMatch.container.id) + + // Add highlight + const containerElement = document.querySelector(`[data-id="${bestMatch.container.id}"]`) + if (containerElement) { + if ((bestMatch.container.data as SubflowNodeData)?.kind === 'loop') { + 
containerElement.classList.add('loop-node-drag-over') + } else if ((bestMatch.container.data as SubflowNodeData)?.kind === 'parallel') { + containerElement.classList.add('parallel-node-drag-over') + } + document.body.style.cursor = 'copy' + } + } + } else if (potentialParentId) { + clearDragHighlights() + setPotentialParentId(null) + } + }, + [ + blocks, + getNodes, + potentialParentId, + getNodeAbsolutePosition, + getNodeDepth, + clearDragHighlights, + ] + ) + const onSelectionDragStop = useCallback( (_event: React.MouseEvent, nodes: any[]) => { requestAnimationFrame(() => setIsSelectionDragActive(false)) + clearDragHighlights() if (nodes.length === 0) return - const positionUpdates = nodes.map((node) => { - const currentBlock = blocks[node.id] - const currentParentId = currentBlock?.data?.parentId - let finalPosition = node.position - - if (currentParentId) { - const parentNode = getNodes().find((n) => n.id === currentParentId) - if (parentNode) { - const containerDimensions = { - width: parentNode.data?.width || CONTAINER_DIMENSIONS.DEFAULT_WIDTH, - height: parentNode.data?.height || CONTAINER_DIMENSIONS.DEFAULT_HEIGHT, - } - const blockDimensions = { - width: BLOCK_DIMENSIONS.FIXED_WIDTH, - height: Math.max( - currentBlock?.height || BLOCK_DIMENSIONS.MIN_HEIGHT, - BLOCK_DIMENSIONS.MIN_HEIGHT - ), - } - finalPosition = clampPositionToContainer( - node.position, - containerDimensions, - blockDimensions + const allNodes = getNodes() + const positionUpdates = computeClampedPositionUpdates(nodes, blocks, allNodes) + collaborativeBatchUpdatePositions(positionUpdates, { + previousPositions: multiNodeDragStartRef.current, + }) + + // Process parent updates for nodes whose parent is changing + // Check each node individually - don't rely on dragStartParentId since + // multi-node selections can contain nodes from different parents + const selectedNodeIds = new Set(nodes.map((n: Node) => n.id)) + const nodesNeedingParentUpdate = nodes.filter((n: Node) => { + const block 
= blocks[n.id] + if (!block) return false + const currentParent = block.data?.parentId || null + // Skip if the node's parent is also being moved (keep children with their parent) + if (currentParent && selectedNodeIds.has(currentParent)) return false + // Node needs update if current parent !== target parent + return currentParent !== potentialParentId + }) + + if (nodesNeedingParentUpdate.length > 0) { + // Filter out nodes that cannot be moved into subflows (when target is a subflow) + const validNodes = nodesNeedingParentUpdate.filter((n: Node) => { + // These restrictions only apply when moving INTO a subflow + if (potentialParentId) { + if (n.data?.type === 'starter') return false + const block = blocks[n.id] + if (block && TriggerUtils.isTriggerBlock(block)) return false + if (n.type === 'subflowNode') return false + } + return true + }) + + if (validNodes.length > 0) { + const movingNodeIds = new Set(validNodes.map((n: Node) => n.id)) + const boundaryEdges = edgesForDisplay.filter((e) => { + const sourceInSelection = movingNodeIds.has(e.source) + const targetInSelection = movingNodeIds.has(e.target) + return sourceInSelection !== targetInSelection + }) + + const rawUpdates = validNodes.map((n: Node) => { + const edgesForThisNode = boundaryEdges.filter( + (e) => e.source === n.id || e.target === n.id ) + const newPosition = potentialParentId + ? calculateRelativePosition(n.id, potentialParentId, true) + : getNodeAbsolutePosition(n.id) + return { + blockId: n.id, + newParentId: potentialParentId, + newPosition, + affectedEdges: edgesForThisNode, + } + }) + + let updates = rawUpdates + if (potentialParentId) { + const minX = Math.min(...rawUpdates.map((u) => u.newPosition.x)) + const minY = Math.min(...rawUpdates.map((u) => u.newPosition.y)) + + const targetMinX = CONTAINER_DIMENSIONS.LEFT_PADDING + const targetMinY = CONTAINER_DIMENSIONS.HEADER_HEIGHT + CONTAINER_DIMENSIONS.TOP_PADDING + + const shiftX = minX < targetMinX ? 
targetMinX - minX : 0 + const shiftY = minY < targetMinY ? targetMinY - minY : 0 + + updates = rawUpdates.map((u) => ({ + ...u, + newPosition: { + x: u.newPosition.x + shiftX, + y: u.newPosition.y + shiftY, + }, + })) } - } - return { id: node.id, position: finalPosition } - }) + collaborativeBatchUpdateParent(updates) - collaborativeBatchUpdatePositions(positionUpdates) + setDisplayNodes((nodes) => + nodes.map((node) => { + const update = updates.find((u) => u.blockId === node.id) + if (update) { + return { + ...node, + position: update.newPosition, + parentId: update.newParentId ?? undefined, + } + } + return node + }) + ) + + if (potentialParentId) { + resizeLoopNodesWrapper() + } + + logger.info('Batch moved selection to new parent', { + targetParentId: potentialParentId, + nodeCount: validNodes.length, + }) + } + } + + // Clear drag state + setDragStartPosition(null) + setPotentialParentId(null) + multiNodeDragStartRef.current.clear() }, - [blocks, getNodes, collaborativeBatchUpdatePositions] + [ + blocks, + getNodes, + getNodeAbsolutePosition, + collaborativeBatchUpdatePositions, + collaborativeBatchUpdateParent, + calculateRelativePosition, + resizeLoopNodesWrapper, + potentialParentId, + edgesForDisplay, + clearDragHighlights, + ] ) const onPaneClick = useCallback(() => { - setSelectedEdgeInfo(null) + setSelectedEdges(new Map()) usePanelEditorStore.getState().clearCurrentBlock() }, []) - /** Handles edge selection with container context tracking. */ + /** + * Handles node click to select the node in ReactFlow. + * This ensures clicking anywhere on a block (not just the drag handle) + * selects it for delete/backspace and multi-select operations. + */ + const handleNodeClick = useCallback( + (event: React.MouseEvent, node: Node) => { + const isMultiSelect = event.shiftKey || event.metaKey || event.ctrlKey + + setNodes((nodes) => + nodes.map((n) => ({ + ...n, + selected: isMultiSelect ? (n.id === node.id ? 
true : n.selected) : n.id === node.id, + })) + ) + }, + [setNodes] + ) + + /** Handles edge selection with container context tracking and Shift-click multi-selection. */ const onEdgeClick = useCallback( (event: React.MouseEvent, edge: any) => { event.stopPropagation() // Prevent bubbling @@ -2664,11 +3104,21 @@ const WorkflowContent = React.memo(() => { // Create a unique identifier that combines edge ID and parent context const contextId = `${edge.id}${parentLoopId ? `-${parentLoopId}` : ''}` - setSelectedEdgeInfo({ - id: edge.id, - parentLoopId, - contextId, - }) + if (event.shiftKey) { + // Shift-click: toggle edge in selection + setSelectedEdges((prev) => { + const next = new Map(prev) + if (next.has(contextId)) { + next.delete(contextId) + } else { + next.set(contextId, edge.id) + } + return next + }) + } else { + // Normal click: replace selection with this edge + setSelectedEdges(new Map([[contextId, edge.id]])) + } }, [getNodes] ) @@ -2677,14 +3127,22 @@ const WorkflowContent = React.memo(() => { const handleEdgeDelete = useCallback( (edgeId: string) => { removeEdge(edgeId) - setSelectedEdgeInfo((current) => (current?.id === edgeId ? null : current)) + // Remove this edge from selection (find by edge ID value) + setSelectedEdges((prev) => { + const next = new Map(prev) + for (const [contextId, id] of next) { + if (id === edgeId) { + next.delete(contextId) + } + } + return next + }) }, [removeEdge] ) /** Transforms edges to include selection state and delete handlers. Memoized to prevent re-renders. 
*/ const edgesWithSelection = useMemo(() => { - // Build node lookup map once - O(n) instead of O(n) per edge const nodeMap = new Map(displayNodes.map((n) => [n.id, n])) return edgesForDisplay.map((edge) => { @@ -2697,7 +3155,7 @@ const WorkflowContent = React.memo(() => { ...edge, data: { ...edge.data, - isSelected: selectedEdgeInfo?.contextId === edgeContextId, + isSelected: selectedEdges.has(edgeContextId), isInsideLoop: Boolean(parentLoopId), parentLoopId, sourceHandle: edge.sourceHandle, @@ -2705,7 +3163,7 @@ const WorkflowContent = React.memo(() => { }, } }) - }, [edgesForDisplay, displayNodes, selectedEdgeInfo?.contextId, handleEdgeDelete]) + }, [edgesForDisplay, displayNodes, selectedEdges, handleEdgeDelete]) /** Handles Delete/Backspace to remove selected edges or blocks. */ useEffect(() => { @@ -2715,20 +3173,16 @@ const WorkflowContent = React.memo(() => { } // Ignore when typing/navigating inside editable inputs or editors - const activeElement = document.activeElement - const isEditableElement = - activeElement instanceof HTMLInputElement || - activeElement instanceof HTMLTextAreaElement || - activeElement?.hasAttribute('contenteditable') - - if (isEditableElement) { + if (isInEditableElement()) { return } // Handle edge deletion first (edges take priority if selected) - if (selectedEdgeInfo) { - removeEdge(selectedEdgeInfo.id) - setSelectedEdgeInfo(null) + if (selectedEdges.size > 0) { + // Get all selected edge IDs and batch delete them + const edgeIds = Array.from(selectedEdges.values()) + collaborativeBatchRemoveEdges(edgeIds) + setSelectedEdges(new Map()) return } @@ -2750,8 +3204,8 @@ const WorkflowContent = React.memo(() => { window.addEventListener('keydown', handleKeyDown) return () => window.removeEventListener('keydown', handleKeyDown) }, [ - selectedEdgeInfo, - removeEdge, + selectedEdges, + collaborativeBatchRemoveEdges, getNodes, collaborativeBatchRemoveBlocks, effectivePermissions.canEdit, @@ -2808,6 +3262,7 @@ const WorkflowContent = 
React.memo(() => { connectionLineType={ConnectionLineType.SmoothStep} onPaneClick={onPaneClick} onEdgeClick={onEdgeClick} + onNodeClick={handleNodeClick} onPaneContextMenu={handlePaneContextMenu} onNodeContextMenu={handleNodeContextMenu} onSelectionContextMenu={handleSelectionContextMenu} @@ -2815,10 +3270,11 @@ const WorkflowContent = React.memo(() => { onPointerLeave={handleCanvasPointerLeave} elementsSelectable={true} selectionOnDrag={isShiftPressed || isSelectionDragActive} + selectionMode={SelectionMode.Partial} panOnDrag={isShiftPressed || isSelectionDragActive ? false : [0, 1]} onSelectionStart={onSelectionStart} onSelectionEnd={onSelectionEnd} - multiSelectionKeyCode={['Meta', 'Control']} + multiSelectionKeyCode={['Meta', 'Control', 'Shift']} nodesConnectable={effectivePermissions.canEdit} nodesDraggable={effectivePermissions.canEdit} draggable={false} @@ -2828,6 +3284,8 @@ const WorkflowContent = React.memo(() => { className={`workflow-container h-full transition-opacity duration-150 ${reactFlowStyles} ${isCanvasReady ? 'opacity-100' : 'opacity-0'}`} onNodeDrag={effectivePermissions.canEdit ? onNodeDrag : undefined} onNodeDragStop={effectivePermissions.canEdit ? onNodeDragStop : undefined} + onSelectionDragStart={effectivePermissions.canEdit ? onSelectionDragStart : undefined} + onSelectionDrag={effectivePermissions.canEdit ? onSelectionDrag : undefined} onSelectionDragStop={effectivePermissions.canEdit ? onSelectionDragStop : undefined} onNodeDragStart={effectivePermissions.canEdit ? 
onNodeDragStart : undefined} snapToGrid={snapToGrid} diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/preview/components/block-details-sidebar.tsx b/apps/sim/app/workspace/[workspaceId]/w/components/preview/components/block-details-sidebar.tsx index 2db914d741..5a48723c68 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/components/preview/components/block-details-sidebar.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/components/preview/components/block-details-sidebar.tsx @@ -1,17 +1,28 @@ 'use client' -import { useEffect, useMemo, useState } from 'react' -import { ChevronDown as ChevronDownIcon, X } from 'lucide-react' +import { useCallback, useEffect, useMemo, useRef, useState } from 'react' +import { + ArrowDown, + ArrowUp, + ChevronDown as ChevronDownIcon, + ChevronUp, + RepeatIcon, + SplitIcon, + X, +} from 'lucide-react' import { ReactFlowProvider } from 'reactflow' -import { Badge, Button, ChevronDown, Code } from '@/components/emcn' +import { Badge, Button, ChevronDown, Code, Combobox, Input, Label } from '@/components/emcn' import { cn } from '@/lib/core/utils/cn' import { extractReferencePrefixes } from '@/lib/workflows/sanitization/references' +import { SnapshotContextMenu } from '@/app/workspace/[workspaceId]/logs/components/log-details/components/execution-snapshot/components' import { SubBlock } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components' +import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks' import { getBlock } from '@/blocks' import type { BlockConfig, BlockIcon, SubBlockConfig } from '@/blocks/types' import { normalizeName } from '@/executor/constants' import { navigatePath } from '@/executor/variables/resolvers/reference' -import type { BlockState } from '@/stores/workflows/workflow/types' +import { useCodeViewerFeatures } from '@/hooks/use-code-viewer' +import type { BlockState, Loop, Parallel } from '@/stores/workflows/workflow/types' /** 
* Evaluate whether a subblock's condition is met based on current values. @@ -87,16 +98,37 @@ interface ResolvedConnection { fields: Array<{ path: string; value: string; tag: string }> } +interface ExtractedReferences { + blockRefs: string[] + workflowVars: string[] + envVars: string[] +} + /** * Extract all variable references from nested subblock values */ -function extractAllReferencesFromSubBlocks(subBlockValues: Record): string[] { - const refs = new Set() +function extractAllReferencesFromSubBlocks( + subBlockValues: Record +): ExtractedReferences { + const blockRefs = new Set() + const workflowVars = new Set() + const envVars = new Set() const processValue = (value: unknown) => { if (typeof value === 'string') { const extracted = extractReferencePrefixes(value) - extracted.forEach((ref) => refs.add(ref.raw)) + for (const ref of extracted) { + if (ref.prefix === 'variable') { + workflowVars.add(ref.raw) + } else { + blockRefs.add(ref.raw) + } + } + + const envMatches = value.match(/\{\{([^}]+)\}\}/g) + if (envMatches) { + envMatches.forEach((match) => envVars.add(match)) + } } else if (Array.isArray(value)) { value.forEach(processValue) } else if (value && typeof value === 'object') { @@ -109,7 +141,11 @@ function extractAllReferencesFromSubBlocks(subBlockValues: Record void + contentRef?: React.RefObject + onContextMenu?: (e: React.MouseEvent) => void } /** * Collapsible section for execution data (input/output) * Uses Code.Viewer for proper syntax highlighting matching the logs UI */ -function ExecutionDataSection({ title, data, isError = false }: ExecutionDataSectionProps) { +function ExecutionDataSection({ + title, + data, + isError = false, + wrapText = true, + searchQuery, + currentMatchIndex = 0, + onMatchCountChange, + contentRef, + onContextMenu, +}: ExecutionDataSectionProps) { const [isExpanded, setIsExpanded] = useState(false) const jsonString = useMemo(() => { @@ -192,12 +244,17 @@ function ExecutionDataSection({ title, data, isError = false }: 
ExecutionDataSec No data
    ) : ( - +
    + +
    )} )} @@ -205,18 +262,54 @@ function ExecutionDataSection({ title, data, isError = false }: ExecutionDataSec ) } +interface ResolvedVariable { + ref: string + name: string + value: string +} + +interface ConnectionsSectionProps { + connections: ResolvedConnection[] + workflowVars: ResolvedVariable[] + envVars: ResolvedVariable[] + onContextMenu?: (e: React.MouseEvent, value: string) => void + /** Height of the connections section */ + height: number + /** Whether the section is being resized */ + isResizing: boolean + /** Whether the connections are at minimum height (collapsed) */ + isAtMinHeight: boolean + /** Handler for resize mouse down */ + onResizeMouseDown: (e: React.MouseEvent) => void + /** Handler for toggling collapsed state */ + onToggleCollapsed: () => void +} + /** * Section showing resolved variable references - styled like the connections section in editor */ -function ResolvedConnectionsSection({ connections }: { connections: ResolvedConnection[] }) { - const [isCollapsed, setIsCollapsed] = useState(false) +function ConnectionsSection({ + connections, + workflowVars, + envVars, + onContextMenu, + height, + isResizing, + isAtMinHeight, + onResizeMouseDown, + onToggleCollapsed, +}: ConnectionsSectionProps) { const [expandedBlocks, setExpandedBlocks] = useState>(new Set()) + const [expandedVariables, setExpandedVariables] = useState(true) + const [expandedEnvVars, setExpandedEnvVars] = useState(true) useEffect(() => { setExpandedBlocks(new Set(connections.map((c) => c.blockId))) }, [connections]) - if (connections.length === 0) return null + const hasContent = connections.length > 0 || workflowVars.length > 0 || envVars.length > 0 + + if (!hasContent) return null const toggleBlock = (blockId: string) => { setExpandedBlocks((prev) => { @@ -230,110 +323,220 @@ function ResolvedConnectionsSection({ connections }: { connections: ResolvedConn }) } + const handleValueContextMenu = (e: React.MouseEvent, value: string) => { + if (value && value !== '—' 
&& value !== '[REDACTED]' && onContextMenu) { + onContextMenu(e, value) + } + } + return ( -
    +
    + {/* Resize Handle */} +
    +
    +
    + {/* Header with Chevron */}
    setIsCollapsed(!isCollapsed)} + onClick={onToggleCollapsed} onKeyDown={(e) => { if (e.key === 'Enter' || e.key === ' ') { e.preventDefault() - setIsCollapsed(!isCollapsed) + onToggleCollapsed() } }} role='button' tabIndex={0} - aria-label={isCollapsed ? 'Expand connections' : 'Collapse connections'} + aria-label={isAtMinHeight ? 'Expand connections' : 'Collapse connections'} > -
    Connections
    {/* Content - styled like ConnectionBlocks */} - {!isCollapsed && ( -
    - {connections.map((connection) => { - const blockConfig = getBlock(connection.blockType) - const Icon = blockConfig?.icon - const bgColor = blockConfig?.bgColor || '#6B7280' - const isExpanded = expandedBlocks.has(connection.blockId) - const hasFields = connection.fields.length > 0 - - return ( -
    - {/* Block header - styled like ConnectionItem */} +
    + {connections.map((connection) => { + const blockConfig = getBlock(connection.blockType) + const Icon = blockConfig?.icon + const bgColor = blockConfig?.bgColor || '#6B7280' + const isExpanded = expandedBlocks.has(connection.blockId) + const hasFields = connection.fields.length > 0 + + return ( +
    + {/* Block header - styled like ConnectionItem */} +
    hasFields && toggleBlock(connection.blockId)} + >
    hasFields && toggleBlock(connection.blockId)} + className='relative flex h-[14px] w-[14px] flex-shrink-0 items-center justify-center overflow-hidden rounded-[4px]' + style={{ background: bgColor }} > -
    - {Icon && ( - - )} -
    - - {connection.blockName} - - {hasFields && ( - )}
    + + {connection.blockName} + + {hasFields && ( + + )} +
    - {/* Fields - styled like FieldItem but showing resolved values */} - {isExpanded && hasFields && ( -
    -
    - {connection.fields.map((field) => ( -
    +
    + {connection.fields.map((field) => ( +
    handleValueContextMenu(e, field.value)} + > + - - {field.path} - - - {field.value} - -
    - ))} + {field.path} + + + {field.value} + +
    + ))} +
    + )} +
    + ) + })} + + {/* Workflow Variables */} + {workflowVars.length > 0 && ( +
    +
    setExpandedVariables(!expandedVariables)} + > +
    + V +
    + + Variables + + +
    + {expandedVariables && ( +
    +
    + {workflowVars.map((v) => ( +
    handleValueContextMenu(e, v.value)} + > + + {v.name} + + {v.value}
    + ))} +
    + )} +
    + )} + + {/* Environment Variables */} + {envVars.length > 0 && ( +
    +
    setExpandedEnvVars(!expandedEnvVars)} + > +
    + E +
    + + Environment Variables + + +
    + {expandedEnvVars && ( +
    +
    + {envVars.map((v) => ( +
    + + {v.name} + + {v.value} +
    + ))}
    - ) - })} -
    - )} + )} +
    + )} +
    ) } @@ -352,6 +555,165 @@ function IconComponent({ return } +/** + * Configuration for subflow types (loop and parallel) - matches use-subflow-editor.ts + */ +const SUBFLOW_CONFIG = { + loop: { + typeLabels: { + for: 'For Loop', + forEach: 'For Each', + while: 'While Loop', + doWhile: 'Do While Loop', + }, + maxIterations: 1000, + }, + parallel: { + typeLabels: { + count: 'Parallel Count', + collection: 'Parallel Each', + }, + maxIterations: 20, + }, +} as const + +interface SubflowConfigDisplayProps { + block: BlockState + loop?: Loop + parallel?: Parallel +} + +/** + * Display subflow (loop/parallel) configuration in preview mode. + * Matches the exact UI structure of SubflowEditor. + */ +function SubflowConfigDisplay({ block, loop, parallel }: SubflowConfigDisplayProps) { + const isLoop = block.type === 'loop' + const config = isLoop ? SUBFLOW_CONFIG.loop : SUBFLOW_CONFIG.parallel + + // Determine current type + const currentType = isLoop + ? loop?.loopType || (block.data?.loopType as string) || 'for' + : parallel?.parallelType || (block.data?.parallelType as string) || 'count' + + // Build type options for combobox - matches SubflowEditor + const typeOptions = Object.entries(config.typeLabels).map(([value, label]) => ({ + value, + label, + })) + + // Determine mode + const isCountMode = currentType === 'for' || currentType === 'count' + const isConditionMode = currentType === 'while' || currentType === 'doWhile' + + // Get iterations value + const iterations = isLoop + ? (loop?.iterations ?? (block.data?.count as number) ?? 5) + : (parallel?.count ?? (block.data?.count as number) ?? 1) + + // Get collection/condition value + const getEditorValue = (): string => { + if (isConditionMode && isLoop) { + if (currentType === 'while') { + return loop?.whileCondition || (block.data?.whileCondition as string) || '' + } + return loop?.doWhileCondition || (block.data?.doWhileCondition as string) || '' + } + + if (isLoop) { + const items = loop?.forEachItems ?? 
block.data?.collection + return typeof items === 'string' ? items : JSON.stringify(items) || '' + } + + const distribution = parallel?.distribution ?? block.data?.collection + return typeof distribution === 'string' ? distribution : JSON.stringify(distribution) || '' + } + + const editorValue = getEditorValue() + + // Get label for configuration field - matches SubflowEditor exactly + const getConfigLabel = (): string => { + if (isCountMode) { + return `${isLoop ? 'Loop' : 'Parallel'} Iterations` + } + if (isConditionMode) { + return 'While Condition' + } + return `${isLoop ? 'Collection' : 'Parallel'} Items` + } + + return ( +
    + {/* Type Selection - matches SubflowEditor */} +
    + + {}} + disabled + placeholder='Select type...' + /> +
    + + {/* Dashed Line Separator - matches SubflowEditor */} +
    +
    +
    + + {/* Configuration - matches SubflowEditor */} +
    + + + {isCountMode ? ( +
    + {}} + disabled + className='mb-[4px]' + /> +
    + Enter a number between 1 and {config.maxIterations} +
    +
    + ) : ( +
    + + + + {isConditionMode ? ' < 10' : "['item1', 'item2', 'item3']"} + +
    + {editorValue || ( + + {isConditionMode ? ' < 10' : "['item1', 'item2', 'item3']"} + + )} +
    +
    +
    +
    + )} +
    +
    + ) +} + interface ExecutionData { input?: unknown output?: unknown @@ -359,6 +721,13 @@ interface ExecutionData { durationMs?: number } +interface WorkflowVariable { + id: string + name: string + type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'plain' + value: unknown +} + interface BlockDetailsSidebarProps { block: BlockState executionData?: ExecutionData @@ -366,6 +735,12 @@ interface BlockDetailsSidebarProps { allBlockExecutions?: Record /** All workflow blocks for mapping block names to IDs */ workflowBlocks?: Record + /** Workflow variables for resolving variable references */ + workflowVariables?: Record + /** Loop configurations for subflow blocks */ + loops?: Record + /** Parallel configurations for subflow blocks */ + parallels?: Record /** When true, shows "Not Executed" badge if no executionData is provided */ isExecutionMode?: boolean /** Optional close handler - if not provided, no close button is shown */ @@ -380,6 +755,13 @@ function formatDuration(ms: number): string { return `${(ms / 1000).toFixed(2)}s` } +/** Minimum height for the connections section (header only) */ +const MIN_CONNECTIONS_HEIGHT = 30 +/** Maximum height for the connections section */ +const MAX_CONNECTIONS_HEIGHT = 300 +/** Default height for the connections section */ +const DEFAULT_CONNECTIONS_HEIGHT = 150 + /** * Readonly sidebar panel showing block configuration using SubBlock components. 
*/ @@ -388,12 +770,160 @@ function BlockDetailsSidebarContent({ executionData, allBlockExecutions, workflowBlocks, + workflowVariables, + loops, + parallels, isExecutionMode = false, onClose, }: BlockDetailsSidebarProps) { + // Convert Record to Array for iteration + const normalizedWorkflowVariables = useMemo(() => { + if (!workflowVariables) return [] + return Object.values(workflowVariables) + }, [workflowVariables]) + const blockConfig = getBlock(block.type) as BlockConfig | undefined const subBlockValues = block.subBlocks || {} + const contentRef = useRef(null) + const subBlocksRef = useRef(null) + + // Connections resize state + const [connectionsHeight, setConnectionsHeight] = useState(DEFAULT_CONNECTIONS_HEIGHT) + const [isResizing, setIsResizing] = useState(false) + const startYRef = useRef(0) + const startHeightRef = useRef(0) + + const { + wrapText, + toggleWrapText, + isSearchActive, + searchQuery, + setSearchQuery, + matchCount, + currentMatchIndex, + activateSearch, + closeSearch, + goToNextMatch, + goToPreviousMatch, + handleMatchCountChange, + searchInputRef, + } = useCodeViewerFeatures({ contentRef }) + + const { + isOpen: isContextMenuOpen, + position: contextMenuPosition, + menuRef: contextMenuRef, + handleContextMenu, + closeMenu: closeContextMenu, + } = useContextMenu() + + const [contextMenuData, setContextMenuData] = useState({ content: '', copyOnly: false }) + + const openContextMenu = useCallback( + (e: React.MouseEvent, content: string, copyOnly: boolean) => { + setContextMenuData({ content, copyOnly }) + handleContextMenu(e) + }, + [handleContextMenu] + ) + + const handleExecutionContextMenu = useCallback( + (e: React.MouseEvent) => { + const parts: string[] = [] + if (executionData?.input) { + parts.push(`// Input\n${formatValueAsJson(executionData.input)}`) + } + if (executionData?.output) { + parts.push(`// Output\n${formatValueAsJson(executionData.output)}`) + } + if (parts.length > 0) { + openContextMenu(e, parts.join('\n\n'), false) 
+ } + }, + [executionData, openContextMenu] + ) + + const handleSubblockContextMenu = useCallback( + (e: React.MouseEvent, config: SubBlockConfig) => { + if (config.password || config.type === 'oauth-input') return + + const valueObj = subBlockValues[config.id] + const value = + valueObj && typeof valueObj === 'object' && 'value' in valueObj + ? (valueObj as { value: unknown }).value + : valueObj + + if (value !== undefined && value !== null && value !== '') { + const content = typeof value === 'string' ? value : JSON.stringify(value, null, 2) + openContextMenu(e, content, true) + } + }, + [subBlockValues, openContextMenu] + ) + + const handleCopy = useCallback(() => { + if (contextMenuData.content) { + navigator.clipboard.writeText(contextMenuData.content) + } + }, [contextMenuData.content]) + + /** + * Handles mouse down event on the resize handle to initiate resizing + */ + const handleConnectionsResizeMouseDown = useCallback( + (e: React.MouseEvent) => { + setIsResizing(true) + startYRef.current = e.clientY + startHeightRef.current = connectionsHeight + }, + [connectionsHeight] + ) + + /** + * Toggle connections collapsed state + */ + const toggleConnectionsCollapsed = useCallback(() => { + setConnectionsHeight((prev) => + prev <= MIN_CONNECTIONS_HEIGHT ? 
DEFAULT_CONNECTIONS_HEIGHT : MIN_CONNECTIONS_HEIGHT + ) + }, []) + + /** + * Sets up resize event listeners during resize operations + */ + useEffect(() => { + if (!isResizing) return + + const handleMouseMove = (e: MouseEvent) => { + const deltaY = startYRef.current - e.clientY // Inverted because we're resizing from bottom up + let newHeight = startHeightRef.current + deltaY + + // Clamp height between fixed min and max for stable behavior + newHeight = Math.max(MIN_CONNECTIONS_HEIGHT, Math.min(MAX_CONNECTIONS_HEIGHT, newHeight)) + setConnectionsHeight(newHeight) + } + + const handleMouseUp = () => { + setIsResizing(false) + } + + document.addEventListener('mousemove', handleMouseMove) + document.addEventListener('mouseup', handleMouseUp) + document.body.style.cursor = 'ns-resize' + document.body.style.userSelect = 'none' + + return () => { + document.removeEventListener('mousemove', handleMouseMove) + document.removeEventListener('mouseup', handleMouseUp) + document.body.style.cursor = '' + document.body.style.userSelect = '' + } + }, [isResizing]) + + // Determine if connections are at minimum height (collapsed state) + const isConnectionsAtMinHeight = connectionsHeight <= MIN_CONNECTIONS_HEIGHT + 5 + const blockNameToId = useMemo(() => { const map = new Map() if (workflowBlocks) { @@ -432,18 +962,20 @@ function BlockDetailsSidebarContent({ } }, [allBlockExecutions, workflowBlocks, blockNameToId]) - // Group resolved variables by source block for display + const extractedRefs = useMemo( + () => extractAllReferencesFromSubBlocks(subBlockValues), + [subBlockValues] + ) + const resolvedConnections = useMemo((): ResolvedConnection[] => { if (!allBlockExecutions || !workflowBlocks) return [] - const allRefs = extractAllReferencesFromSubBlocks(subBlockValues) const seen = new Set() const blockMap = new Map() - for (const ref of allRefs) { + for (const ref of extractedRefs.blockRefs) { if (seen.has(ref)) continue - // Parse reference: const inner = ref.slice(1, -1) 
const parts = inner.split('.') if (parts.length < 1) continue @@ -461,7 +993,6 @@ function BlockDetailsSidebarContent({ seen.add(ref) - // Get or create block entry if (!blockMap.has(blockId)) { blockMap.set(blockId, { blockId, @@ -480,12 +1011,105 @@ function BlockDetailsSidebarContent({ } return Array.from(blockMap.values()) - }, [subBlockValues, allBlockExecutions, workflowBlocks, blockNameToId, resolveReference]) + }, [extractedRefs.blockRefs, allBlockExecutions, workflowBlocks, blockNameToId, resolveReference]) + + const resolvedWorkflowVars = useMemo((): ResolvedVariable[] => { + return extractedRefs.workflowVars.map((ref) => { + const inner = ref.slice(1, -1) + const parts = inner.split('.') + const varName = parts.slice(1).join('.') + + let value = '—' + if (normalizedWorkflowVariables.length > 0) { + const normalizedVarName = normalizeName(varName) + const matchedVar = normalizedWorkflowVariables.find( + (v) => normalizeName(v.name) === normalizedVarName + ) + if (matchedVar !== undefined) { + value = formatInlineValue(matchedVar.value) + } + } + + return { ref, name: varName, value } + }) + }, [extractedRefs.workflowVars, normalizedWorkflowVariables]) + + const resolvedEnvVars = useMemo((): ResolvedVariable[] => { + return extractedRefs.envVars.map((ref) => { + const varName = ref.slice(2, -2) + return { ref, name: varName, value: '[REDACTED]' } + }) + }, [extractedRefs.envVars]) + + // Check if this is a subflow block (loop or parallel) + const isSubflow = block.type === 'loop' || block.type === 'parallel' + const loopConfig = block.type === 'loop' ? loops?.[block.id] : undefined + const parallelConfig = block.type === 'parallel' ? parallels?.[block.id] : undefined + + // Handle subflow blocks + if (isSubflow) { + const isLoop = block.type === 'loop' + const SubflowIcon = isLoop ? RepeatIcon : SplitIcon + const subflowBgColor = isLoop ? '#2FB3FF' : '#FEE12B' + const subflowName = block.name || (isLoop ? 'Loop' : 'Parallel') + + return ( +
    + {/* Header - styled like subflow header */} +
    +
    + +
    + + {subflowName} + + {onClose && ( + + )} +
    + + {/* Subflow Configuration */} +
    +
    +
    + {/* CSS override to show full opacity and prevent interaction instead of dimmed disabled state */} + + +
    +
    +
    +
    + ) + } if (!blockConfig) { return ( -
    -
    +
    +
    {block.name || 'Unknown Block'} @@ -515,9 +1139,9 @@ function BlockDetailsSidebarContent({ : 'gray' return ( -
    +
    {/* Header - styled like editor */} -
    +
    - {/* Scrollable content */} -
    - {/* Not Executed Banner - shown when in execution mode but block wasn't executed */} - {isExecutionMode && !executionData && ( -
    -
    - - Not Executed - -
    -
    - )} - - {/* Execution Input/Output (if provided) */} - {executionData && - (executionData.input !== undefined || executionData.output !== undefined) ? ( -
    - {/* Execution Status & Duration Header */} - {(executionData.status || executionData.durationMs !== undefined) && ( -
    - {executionData.status && ( - - {executionData.status} + {/* Content area */} +
    + {/* Subblocks Section */} +
    +
    + {/* Not Executed Banner - shown when in execution mode but block wasn't executed */} + {isExecutionMode && !executionData && ( +
    +
    + + Not Executed - )} - {executionData.durationMs !== undefined && ( - - {formatDuration(executionData.durationMs)} - - )} +
    )} - {/* Divider between Status/Duration and Input/Output */} - {(executionData.status || executionData.durationMs !== undefined) && - (executionData.input !== undefined || executionData.output !== undefined) && ( -
    - )} + {/* Execution Input/Output (if provided) */} + {executionData && + (executionData.input !== undefined || executionData.output !== undefined) ? ( +
    + {/* Execution Status & Duration Header */} + {(executionData.status || executionData.durationMs !== undefined) && ( +
    + {executionData.status && ( + + {executionData.status} + + )} + {executionData.durationMs !== undefined && ( + + {formatDuration(executionData.durationMs)} + + )} +
    + )} - {/* Input Section */} - {executionData.input !== undefined && ( - - )} + {/* Divider between Status/Duration and Input/Output */} + {(executionData.status || executionData.durationMs !== undefined) && + (executionData.input !== undefined || executionData.output !== undefined) && ( +
    + )} - {/* Divider between Input and Output */} - {executionData.input !== undefined && executionData.output !== undefined && ( -
    - )} + {/* Input Section */} + {executionData.input !== undefined && ( + + )} - {/* Output Section */} - {executionData.output !== undefined && ( - - )} -
    - ) : null} + {/* Divider between Input and Output */} + {executionData.input !== undefined && executionData.output !== undefined && ( +
    + )} - {/* Subblock Values - Using SubBlock components in preview mode */} -
    - {/* CSS override to show full opacity and prevent interaction instead of dimmed disabled state */} - - {visibleSubBlocks.length > 0 ? ( -
    - {visibleSubBlocks.map((subBlockConfig, index) => ( -
    - - {index < visibleSubBlocks.length - 1 && ( -
    -
    0 ? ( +
    + {visibleSubBlocks.map((subBlockConfig, index) => ( +
    handleSubblockContextMenu(e, subBlockConfig)} + > + + {index < visibleSubBlocks.length - 1 && ( +
    +
    +
    + )}
    - )} + ))}
    - ))} -
    - ) : ( -
    -

    - No configurable fields for this block. -

    + ) : ( +
    +

    + No configurable fields for this block. +

    +
    + )}
    - )} +
    + + {/* Connections Section - Only show when there are connections */} + {(resolvedConnections.length > 0 || + resolvedWorkflowVars.length > 0 || + resolvedEnvVars.length > 0) && ( + openContextMenu(e, value, true)} + height={connectionsHeight} + isResizing={isResizing} + isAtMinHeight={isConnectionsAtMinHeight} + onResizeMouseDown={handleConnectionsResizeMouseDown} + onToggleCollapsed={toggleConnectionsCollapsed} + /> + )}
    - {/* Resolved Variables Section - Pinned at bottom, outside scrollable area */} - {resolvedConnections.length > 0 && ( - + {/* Search Overlay */} + {isSearchActive && ( +
    e.stopPropagation()} + > + setSearchQuery(e.target.value)} + placeholder='Search...' + className='mr-[2px] h-[23px] w-[94px] text-[12px]' + /> + 0 ? 'text-[var(--text-secondary)]' : 'text-[var(--text-tertiary)]' + )} + > + {matchCount > 0 ? `${currentMatchIndex + 1}/${matchCount}` : '0/0'} + + + + +
    )} + + {/* Context Menu */} +
    ) } diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/preview/components/block.tsx b/apps/sim/app/workspace/[workspaceId]/w/components/preview/components/block.tsx index 423ad95032..5725ec2fb2 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/components/preview/components/block.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/components/preview/components/block.tsx @@ -5,12 +5,19 @@ import { Handle, type NodeProps, Position } from 'reactflow' import { HANDLE_POSITIONS } from '@/lib/workflows/blocks/block-dimensions' import { getBlock } from '@/blocks' +/** Execution status for blocks in preview mode */ +type ExecutionStatus = 'success' | 'error' | 'not-executed' + interface WorkflowPreviewBlockData { type: string name: string isTrigger?: boolean horizontalHandles?: boolean enabled?: boolean + /** Whether this block is selected in preview mode */ + isPreviewSelected?: boolean + /** Execution status for highlighting error/success states */ + executionStatus?: ExecutionStatus } /** @@ -21,18 +28,20 @@ interface WorkflowPreviewBlockData { * Used in template cards and other preview contexts for performance. 
*/ function WorkflowPreviewBlockInner({ data }: NodeProps) { - const { type, name, isTrigger = false, horizontalHandles = false, enabled = true } = data + const { + type, + name, + isTrigger = false, + horizontalHandles = false, + enabled = true, + isPreviewSelected = false, + executionStatus, + } = data const blockConfig = getBlock(type) - if (!blockConfig) { - return null - } - - const IconComponent = blockConfig.icon - const isStarterOrTrigger = blockConfig.category === 'triggers' || type === 'starter' || isTrigger const visibleSubBlocks = useMemo(() => { - if (!blockConfig.subBlocks) return [] + if (!blockConfig?.subBlocks) return [] return blockConfig.subBlocks.filter((subBlock) => { if (subBlock.hidden) return false @@ -41,7 +50,14 @@ function WorkflowPreviewBlockInner({ data }: NodeProps if (subBlock.mode === 'advanced') return false return true }) - }, [blockConfig.subBlocks]) + }, [blockConfig?.subBlocks]) + + if (!blockConfig) { + return null + } + + const IconComponent = blockConfig.icon + const isStarterOrTrigger = blockConfig.category === 'triggers' || type === 'starter' || isTrigger const hasSubBlocks = visibleSubBlocks.length > 0 const showErrorRow = !isStarterOrTrigger @@ -49,8 +65,24 @@ function WorkflowPreviewBlockInner({ data }: NodeProps const horizontalHandleClass = '!border-none !bg-[var(--surface-7)] !h-5 !w-[7px] !rounded-[2px]' const verticalHandleClass = '!border-none !bg-[var(--surface-7)] !h-[7px] !w-5 !rounded-[2px]' + const hasError = executionStatus === 'error' + const hasSuccess = executionStatus === 'success' + return (
    + {/* Selection ring overlay (takes priority over execution rings) */} + {isPreviewSelected && ( +
    + )} + {/* Success ring overlay (only shown if not selected) */} + {!isPreviewSelected && hasSuccess && ( +
    + )} + {/* Error ring overlay (only shown if not selected) */} + {!isPreviewSelected && hasError && ( +
    + )} + {/* Target handle - not shown for triggers/starters */} {!isStarterOrTrigger && ( ) } -export const WorkflowPreviewBlock = memo(WorkflowPreviewBlockInner) +function shouldSkipPreviewBlockRender( + prevProps: NodeProps, + nextProps: NodeProps +): boolean { + return ( + prevProps.id === nextProps.id && + prevProps.data.type === nextProps.data.type && + prevProps.data.name === nextProps.data.name && + prevProps.data.isTrigger === nextProps.data.isTrigger && + prevProps.data.horizontalHandles === nextProps.data.horizontalHandles && + prevProps.data.enabled === nextProps.data.enabled && + prevProps.data.isPreviewSelected === nextProps.data.isPreviewSelected && + prevProps.data.executionStatus === nextProps.data.executionStatus + ) +} + +export const WorkflowPreviewBlock = memo(WorkflowPreviewBlockInner, shouldSkipPreviewBlockRender) diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/preview/components/subflow.tsx b/apps/sim/app/workspace/[workspaceId]/w/components/preview/components/subflow.tsx index 67befddbda..99a0e8ca9c 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/components/preview/components/subflow.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/components/preview/components/subflow.tsx @@ -10,6 +10,8 @@ interface WorkflowPreviewSubflowData { width?: number height?: number kind: 'loop' | 'parallel' + /** Whether this subflow is selected in preview mode */ + isPreviewSelected?: boolean } /** @@ -19,7 +21,7 @@ interface WorkflowPreviewSubflowData { * Used in template cards and other preview contexts for performance. */ function WorkflowPreviewSubflowInner({ data }: NodeProps) { - const { name, width = 500, height = 300, kind } = data + const { name, width = 500, height = 300, kind, isPreviewSelected = false } = data const isLoop = kind === 'loop' const BlockIcon = isLoop ? RepeatIcon : SplitIcon @@ -42,6 +44,11 @@ function WorkflowPreviewSubflowInner({ data }: NodeProps + {/* Selection ring overlay */} + {isPreviewSelected && ( +
    + )} + {/* Target handle on left (input to the subflow) */} - {/* Header - matches actual subflow header */} -
    -
    - + {/* Header - matches actual subflow header structure */} +
    +
    +
    + +
    + + {blockName} +
    - - {blockName} -
    - {/* Start handle inside - connects to first block in subflow */} -
    - Start - + {/* Content area - matches workflow structure */} +
    + {/* Subflow Start - connects to first block in subflow */} +
    + Start + +
    {/* End source handle on right (output from the subflow) */} diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/preview/index.ts b/apps/sim/app/workspace/[workspaceId]/w/components/preview/index.ts index 4c959e26d3..89c096d6eb 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/components/preview/index.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/components/preview/index.ts @@ -1,2 +1,2 @@ export { BlockDetailsSidebar } from './components/block-details-sidebar' -export { WorkflowPreview } from './preview' +export { getLeftmostBlockId, WorkflowPreview } from './preview' diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/preview/preview.tsx b/apps/sim/app/workspace/[workspaceId]/w/components/preview/preview.tsx index b617d3da59..af554710cb 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/components/preview/preview.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/components/preview/preview.tsx @@ -1,6 +1,6 @@ 'use client' -import { useEffect, useMemo } from 'react' +import { useEffect, useMemo, useRef } from 'react' import ReactFlow, { ConnectionLineType, type Edge, @@ -14,23 +14,114 @@ import 'reactflow/dist/style.css' import { createLogger } from '@sim/logger' import { cn } from '@/lib/core/utils/cn' +import { BLOCK_DIMENSIONS, CONTAINER_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions' import { NoteBlock } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/note-block/note-block' import { SubflowNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/subflow-node' import { WorkflowBlock } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block' import { WorkflowEdge } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-edge/workflow-edge' +import { estimateBlockDimensions } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-node-utilities' import { WorkflowPreviewBlock } from 
'@/app/workspace/[workspaceId]/w/components/preview/components/block' import { WorkflowPreviewSubflow } from '@/app/workspace/[workspaceId]/w/components/preview/components/subflow' import { getBlock } from '@/blocks' -import type { WorkflowState } from '@/stores/workflows/workflow/types' +import type { BlockState, WorkflowState } from '@/stores/workflows/workflow/types' const logger = createLogger('WorkflowPreview') +/** + * Gets block dimensions for preview purposes. + * For containers, uses stored dimensions or defaults. + * For regular blocks, uses stored height or estimates based on type. + */ +function getPreviewBlockDimensions(block: BlockState): { width: number; height: number } { + if (block.type === 'loop' || block.type === 'parallel') { + return { + width: block.data?.width + ? Math.max(block.data.width, CONTAINER_DIMENSIONS.MIN_WIDTH) + : CONTAINER_DIMENSIONS.DEFAULT_WIDTH, + height: block.data?.height + ? Math.max(block.data.height, CONTAINER_DIMENSIONS.MIN_HEIGHT) + : CONTAINER_DIMENSIONS.DEFAULT_HEIGHT, + } + } + + if (block.height) { + return { + width: BLOCK_DIMENSIONS.FIXED_WIDTH, + height: Math.max(block.height, BLOCK_DIMENSIONS.MIN_HEIGHT), + } + } + + return estimateBlockDimensions(block.type) +} + +/** + * Calculates container dimensions based on child block positions and sizes. + * Mirrors the logic from useNodeUtilities.calculateLoopDimensions. 
+ */ +function calculateContainerDimensions( + containerId: string, + blocks: Record +): { width: number; height: number } { + const childBlocks = Object.values(blocks).filter((block) => block?.data?.parentId === containerId) + + if (childBlocks.length === 0) { + return { + width: CONTAINER_DIMENSIONS.DEFAULT_WIDTH, + height: CONTAINER_DIMENSIONS.DEFAULT_HEIGHT, + } + } + + let maxRight = 0 + let maxBottom = 0 + + for (const child of childBlocks) { + if (!child?.position) continue + + const { width: childWidth, height: childHeight } = getPreviewBlockDimensions(child) + + maxRight = Math.max(maxRight, child.position.x + childWidth) + maxBottom = Math.max(maxBottom, child.position.y + childHeight) + } + + const width = Math.max( + CONTAINER_DIMENSIONS.DEFAULT_WIDTH, + maxRight + CONTAINER_DIMENSIONS.RIGHT_PADDING + ) + const height = Math.max( + CONTAINER_DIMENSIONS.DEFAULT_HEIGHT, + maxBottom + CONTAINER_DIMENSIONS.BOTTOM_PADDING + ) + + return { width, height } +} + +/** + * Finds the leftmost block ID from a workflow state. + * Returns the block with the smallest x position, excluding subflow containers (loop/parallel). + */ +export function getLeftmostBlockId(workflowState: WorkflowState | null | undefined): string | null { + if (!workflowState?.blocks) return null + + let leftmostId: string | null = null + let minX = Number.POSITIVE_INFINITY + + for (const [blockId, block] of Object.entries(workflowState.blocks)) { + if (!block || block.type === 'loop' || block.type === 'parallel') continue + const x = block.position?.x ?? 
Number.POSITIVE_INFINITY + if (x < minX) { + minX = x + leftmostId = blockId + } + } + + return leftmostId +} + /** Execution status for edges/nodes in the preview */ type ExecutionStatus = 'success' | 'error' | 'not-executed' interface WorkflowPreviewProps { workflowState: WorkflowState - showSubBlocks?: boolean className?: string height?: string | number width?: string | number @@ -39,12 +130,18 @@ interface WorkflowPreviewProps { defaultZoom?: number fitPadding?: number onNodeClick?: (blockId: string, mousePosition: { x: number; y: number }) => void + /** Callback when a node is right-clicked */ + onNodeContextMenu?: (blockId: string, mousePosition: { x: number; y: number }) => void + /** Callback when the canvas (empty area) is clicked */ + onPaneClick?: () => void /** Use lightweight blocks for better performance in template cards */ lightweight?: boolean /** Cursor style to show when hovering the canvas */ cursorStyle?: 'default' | 'pointer' | 'grab' /** Map of executed block IDs to their status for highlighting the execution path */ executedBlocks?: Record + /** Currently selected block ID for highlighting */ + selectedBlockId?: string | null } /** @@ -73,44 +170,49 @@ const edgeTypes: EdgeTypes = { } interface FitViewOnChangeProps { - nodes: Node[] + nodeIds: string fitPadding: number } /** - * Helper component that calls fitView when nodes change. + * Helper component that calls fitView when the set of nodes changes. + * Only triggers on actual node additions/removals, not on selection changes. * Must be rendered inside ReactFlowProvider. 
*/ -function FitViewOnChange({ nodes, fitPadding }: FitViewOnChangeProps) { +function FitViewOnChange({ nodeIds, fitPadding }: FitViewOnChangeProps) { const { fitView } = useReactFlow() + const hasFittedRef = useRef(false) useEffect(() => { - if (nodes.length > 0) { + if (nodeIds.length > 0 && !hasFittedRef.current) { + hasFittedRef.current = true // Small delay to ensure nodes are rendered before fitting const timeoutId = setTimeout(() => { fitView({ padding: fitPadding, duration: 200 }) }, 50) return () => clearTimeout(timeoutId) } - }, [nodes, fitPadding, fitView]) + }, [nodeIds, fitPadding, fitView]) return null } export function WorkflowPreview({ workflowState, - showSubBlocks = true, className, height = '100%', width = '100%', - isPannable = false, + isPannable = true, defaultPosition, defaultZoom = 0.8, fitPadding = 0.25, onNodeClick, + onNodeContextMenu, + onPaneClick, lightweight = false, cursorStyle = 'grab', executedBlocks, + selectedBlockId, }: WorkflowPreviewProps) { const nodeTypes = lightweight ? 
lightweightNodeTypes : fullNodeTypes const isValidWorkflowState = workflowState?.blocks && workflowState.edges @@ -184,6 +286,8 @@ export function WorkflowPreview({ if (lightweight) { if (block.type === 'loop' || block.type === 'parallel') { + const isSelected = selectedBlockId === blockId + const dimensions = calculateContainerDimensions(blockId, workflowState.blocks) nodeArray.push({ id: blockId, type: 'subflowNode', @@ -191,31 +295,56 @@ export function WorkflowPreview({ draggable: false, data: { name: block.name, - width: block.data?.width || 500, - height: block.data?.height || 300, + width: dimensions.width, + height: dimensions.height, kind: block.type as 'loop' | 'parallel', + isPreviewSelected: isSelected, }, }) return } + const isSelected = selectedBlockId === blockId + + let lightweightExecutionStatus: ExecutionStatus | undefined + if (executedBlocks) { + const blockExecution = executedBlocks[blockId] + if (blockExecution) { + if (blockExecution.status === 'error') { + lightweightExecutionStatus = 'error' + } else if (blockExecution.status === 'success') { + lightweightExecutionStatus = 'success' + } else { + lightweightExecutionStatus = 'not-executed' + } + } else { + lightweightExecutionStatus = 'not-executed' + } + } + nodeArray.push({ id: blockId, type: 'workflowBlock', position: absolutePosition, draggable: false, + // Blocks inside subflows need higher z-index to appear above the container + zIndex: block.data?.parentId ? 10 : undefined, data: { type: block.type, name: block.name, isTrigger: block.triggerMode === true, horizontalHandles: block.horizontalHandles ?? false, enabled: block.enabled ?? 
true, + isPreviewSelected: isSelected, + executionStatus: lightweightExecutionStatus, }, }) return } if (block.type === 'loop') { + const isSelected = selectedBlockId === blockId + const dimensions = calculateContainerDimensions(blockId, workflowState.blocks) nodeArray.push({ id: blockId, type: 'subflowNode', @@ -226,10 +355,11 @@ export function WorkflowPreview({ data: { ...block.data, name: block.name, - width: block.data?.width || 500, - height: block.data?.height || 300, + width: dimensions.width, + height: dimensions.height, state: 'valid', isPreview: true, + isPreviewSelected: isSelected, kind: 'loop', }, }) @@ -237,6 +367,8 @@ export function WorkflowPreview({ } if (block.type === 'parallel') { + const isSelected = selectedBlockId === blockId + const dimensions = calculateContainerDimensions(blockId, workflowState.blocks) nodeArray.push({ id: blockId, type: 'subflowNode', @@ -247,10 +379,11 @@ export function WorkflowPreview({ data: { ...block.data, name: block.name, - width: block.data?.width || 500, - height: block.data?.height || 300, + width: dimensions.width, + height: dimensions.height, state: 'valid', isPreview: true, + isPreviewSelected: isSelected, kind: 'parallel', }, }) @@ -281,15 +414,15 @@ export function WorkflowPreview({ } } + const isSelected = selectedBlockId === blockId + nodeArray.push({ id: blockId, type: nodeType, position: absolutePosition, draggable: false, - className: - executionStatus && executionStatus !== 'not-executed' - ? `execution-${executionStatus}` - : undefined, + // Blocks inside subflows need higher z-index to appear above the container + zIndex: block.data?.parentId ? 10 : undefined, data: { type: block.type, config: blockConfig, @@ -297,6 +430,7 @@ export function WorkflowPreview({ blockState: block, canEdit: false, isPreview: true, + isPreviewSelected: isSelected, subBlockValues: block.subBlocks ?? 
{}, executionStatus, }, @@ -308,11 +442,11 @@ export function WorkflowPreview({ blocksStructure, loopsStructure, parallelsStructure, - showSubBlocks, workflowState.blocks, isValidWorkflowState, lightweight, executedBlocks, + selectedBlockId, ]) const edges: Edge[] = useMemo(() => { @@ -325,9 +459,8 @@ export function WorkflowPreview({ const targetExecuted = executedBlocks[edge.target] if (sourceExecuted && targetExecuted) { - if (targetExecuted.status === 'error') { - executionStatus = 'error' - } else if (sourceExecuted.status === 'success' && targetExecuted.status === 'success') { + // Edge is success if source succeeded and target was executed (even if target errored) + if (sourceExecuted.status === 'success') { executionStatus = 'success' } else { executionStatus = 'not-executed' @@ -344,6 +477,8 @@ export function WorkflowPreview({ sourceHandle: edge.sourceHandle, targetHandle: edge.targetHandle, data: executionStatus ? { executionStatus } : undefined, + // Raise executed edges above default edges + zIndex: executionStatus === 'success' ? 10 : 0, } }) }, [edgesStructure, workflowState.edges, isValidWorkflowState, executedBlocks]) @@ -368,20 +503,19 @@ export function WorkflowPreview({
    { + event.preventDefault() + event.stopPropagation() + onNodeContextMenu(node.id, { x: event.clientX, y: event.clientY }) + } + : undefined + } + onPaneClick={onPaneClick} /> - +
    ) diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/settings-modal.tsx b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/settings-modal.tsx index 63c6748519..050f2757cc 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/settings-modal.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/settings-modal.tsx @@ -165,7 +165,7 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) { const { data: session } = useSession() const queryClient = useQueryClient() const { data: organizationsData } = useOrganizations() - const { data: subscriptionData } = useSubscriptionData() + const { data: subscriptionData } = useSubscriptionData({ enabled: isBillingEnabled }) const { data: ssoProvidersData, isLoading: isLoadingSSO } = useSSOProviders() const activeOrganization = organizationsData?.activeOrganization diff --git a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-export-workflow.ts b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-export-workflow.ts index 77b7637bbd..f2a0d13f12 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-export-workflow.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-export-workflow.ts @@ -4,6 +4,7 @@ import JSZip from 'jszip' import { sanitizeForExport } from '@/lib/workflows/sanitization/json-sanitizer' import { useFolderStore } from '@/stores/folders/store' import { useWorkflowRegistry } from '@/stores/workflows/registry/store' +import type { Variable } from '@/stores/workflows/workflow/types' const logger = createLogger('useExportWorkflow') @@ -122,17 +123,12 @@ export function useExportWorkflow({ continue } - // Fetch workflow variables + // Fetch workflow variables (API returns Record format directly) const variablesResponse = await fetch(`/api/workflows/${workflowId}/variables`) - let workflowVariables: any[] = [] + let 
workflowVariables: Record | undefined if (variablesResponse.ok) { const variablesData = await variablesResponse.json() - workflowVariables = Object.values(variablesData?.data || {}).map((v: any) => ({ - id: v.id, - name: v.name, - type: v.type, - value: v.value, - })) + workflowVariables = variablesData?.data } // Prepare export state diff --git a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-export-workspace.ts b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-export-workspace.ts index 6856cc099b..1d25315a3e 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-export-workspace.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-export-workspace.ts @@ -2,8 +2,10 @@ import { useCallback, useState } from 'react' import { createLogger } from '@sim/logger' import { exportWorkspaceToZip, + type FolderExportData, type WorkflowExportData, } from '@/lib/workflows/operations/import-export' +import type { Variable } from '@/stores/workflows/workflow/types' const logger = createLogger('useExportWorkspace') @@ -74,15 +76,10 @@ export function useExportWorkspace({ onSuccess }: UseExportWorkspaceProps = {}) } const variablesResponse = await fetch(`/api/workflows/${workflow.id}/variables`) - let workflowVariables: any[] = [] + let workflowVariables: Record | undefined if (variablesResponse.ok) { const variablesData = await variablesResponse.json() - workflowVariables = Object.values(variablesData?.data || {}).map((v: any) => ({ - id: v.id, - name: v.name, - type: v.type, - value: v.value, - })) + workflowVariables = variablesData?.data } workflowsToExport.push({ @@ -101,15 +98,13 @@ export function useExportWorkspace({ onSuccess }: UseExportWorkspaceProps = {}) } } - const foldersToExport: Array<{ - id: string - name: string - parentId: string | null - }> = (foldersData.folders || []).map((folder: any) => ({ - id: folder.id, - name: folder.name, - parentId: folder.parentId, - })) + const foldersToExport: FolderExportData[] = (foldersData.folders || []).map( + 
(folder: FolderExportData) => ({ + id: folder.id, + name: folder.name, + parentId: folder.parentId, + }) + ) const zipBlob = await exportWorkspaceToZip( workspaceName, diff --git a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-import-workflow.ts b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-import-workflow.ts index 00c46a00a3..d4f294e7f1 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-import-workflow.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-import-workflow.ts @@ -79,21 +79,36 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) { body: JSON.stringify(workflowData), }) - // Save variables if any - if (workflowData.variables && workflowData.variables.length > 0) { - const variablesPayload = workflowData.variables.map((v: any) => ({ - id: typeof v.id === 'string' && v.id.trim() ? v.id : crypto.randomUUID(), - workflowId: newWorkflowId, - name: v.name, - type: v.type, - value: v.value, - })) - - await fetch(`/api/workflows/${newWorkflowId}/variables`, { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ variables: variablesPayload }), - }) + // Save variables if any (handle both legacy Array and current Record formats) + if (workflowData.variables) { + // Convert to Record format for API (handles backwards compatibility with old Array exports) + const variablesArray = Array.isArray(workflowData.variables) + ? workflowData.variables + : Object.values(workflowData.variables) + + if (variablesArray.length > 0) { + const variablesRecord: Record< + string, + { id: string; workflowId: string; name: string; type: string; value: unknown } + > = {} + + for (const v of variablesArray) { + const id = typeof v.id === 'string' && v.id.trim() ? 
v.id : crypto.randomUUID() + variablesRecord[id] = { + id, + workflowId: newWorkflowId, + name: v.name, + type: v.type, + value: v.value, + } + } + + await fetch(`/api/workflows/${newWorkflowId}/variables`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ variables: variablesRecord }), + }) + } } logger.info(`Imported workflow: ${workflowName}`) diff --git a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-import-workspace.ts b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-import-workspace.ts index b71487734b..1ad051307b 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-import-workspace.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-import-workspace.ts @@ -159,21 +159,36 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {}) continue } - // Save variables if any - if (workflowData.variables && workflowData.variables.length > 0) { - const variablesPayload = workflowData.variables.map((v: any) => ({ - id: typeof v.id === 'string' && v.id.trim() ? v.id : crypto.randomUUID(), - workflowId: newWorkflow.id, - name: v.name, - type: v.type, - value: v.value, - })) - - await fetch(`/api/workflows/${newWorkflow.id}/variables`, { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ variables: variablesPayload }), - }) + // Save variables if any (handle both legacy Array and current Record formats) + if (workflowData.variables) { + // Convert to Record format for API (handles backwards compatibility with old Array exports) + const variablesArray = Array.isArray(workflowData.variables) + ? workflowData.variables + : Object.values(workflowData.variables) + + if (variablesArray.length > 0) { + const variablesRecord: Record< + string, + { id: string; workflowId: string; name: string; type: string; value: unknown } + > = {} + + for (const v of variablesArray) { + const id = typeof v.id === 'string' && v.id.trim() ? 
v.id : crypto.randomUUID() + variablesRecord[id] = { + id, + workflowId: newWorkflow.id, + name: v.name, + type: v.type, + value: v.value, + } + } + + await fetch(`/api/workflows/${newWorkflow.id}/variables`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ variables: variablesRecord }), + }) + } } logger.info(`Imported workflow: ${workflowName}`) diff --git a/apps/sim/components/emails/components/email-footer.tsx b/apps/sim/components/emails/components/email-footer.tsx index 76ef355ee3..1e6f7bf424 100644 --- a/apps/sim/components/emails/components/email-footer.tsx +++ b/apps/sim/components/emails/components/email-footer.tsx @@ -112,7 +112,7 @@ export function EmailFooter({ baseUrl = getBaseUrl(), unsubscribe, messageId }: {brand.name} - {isHosted && <>, 80 Langton St, San Francisco, CA 94133, USA} + {isHosted && <>, 80 Langton St, San Francisco, CA 94103, USA}   diff --git a/apps/sim/executor/__test-utils__/executor-mocks.ts b/apps/sim/executor/__test-utils__/executor-mocks.ts index 052a861988..efe146ac56 100644 --- a/apps/sim/executor/__test-utils__/executor-mocks.ts +++ b/apps/sim/executor/__test-utils__/executor-mocks.ts @@ -427,9 +427,7 @@ export const createWorkflowWithResponse = (): SerializedWorkflow => ({ input: 'json', }, outputs: { - response: { - input: 'json', - }, + response: { type: 'json', description: 'Input response' }, }, enabled: true, metadata: { id: 'starter', name: 'Starter Block' }, @@ -444,11 +442,9 @@ export const createWorkflowWithResponse = (): SerializedWorkflow => ({ headers: 'json', }, outputs: { - response: { - data: 'json', - status: 'number', - headers: 'json', - }, + data: { type: 'json', description: 'Response data' }, + status: { type: 'number', description: 'Response status' }, + headers: { type: 'json', description: 'Response headers' }, }, enabled: true, metadata: { id: 'response', name: 'Response Block' }, diff --git a/apps/sim/executor/constants.ts 
b/apps/sim/executor/constants.ts index f483bbfc78..7a5d06f405 100644 --- a/apps/sim/executor/constants.ts +++ b/apps/sim/executor/constants.ts @@ -1,3 +1,5 @@ +import type { LoopType, ParallelType } from '@/lib/workflows/types' + export enum BlockType { PARALLEL = 'parallel', LOOP = 'loop', @@ -40,12 +42,8 @@ export const METADATA_ONLY_BLOCK_TYPES = [ BlockType.NOTE, ] as const -export type LoopType = 'for' | 'forEach' | 'while' | 'doWhile' - export type SentinelType = 'start' | 'end' -export type ParallelType = 'collection' | 'count' - export const EDGE = { CONDITION_PREFIX: 'condition-', CONDITION_TRUE: 'condition-true', diff --git a/apps/sim/executor/handlers/router/router-handler.ts b/apps/sim/executor/handlers/router/router-handler.ts index b00cc0f6ea..d702a1b80f 100644 --- a/apps/sim/executor/handlers/router/router-handler.ts +++ b/apps/sim/executor/handlers/router/router-handler.ts @@ -366,12 +366,12 @@ export class RouterBlockHandler implements BlockHandler { let systemPrompt = '' if (isAgentBlockType(targetBlock.metadata?.id)) { + const paramsPrompt = targetBlock.config?.params?.systemPrompt + const inputsPrompt = targetBlock.inputs?.systemPrompt systemPrompt = - targetBlock.config?.params?.systemPrompt || targetBlock.inputs?.systemPrompt || '' - - if (!systemPrompt && targetBlock.inputs) { - systemPrompt = targetBlock.inputs.systemPrompt || '' - } + (typeof paramsPrompt === 'string' ? paramsPrompt : '') || + (typeof inputsPrompt === 'string' ? 
inputsPrompt : '') || + '' } return { diff --git a/apps/sim/hooks/queries/subscription.ts b/apps/sim/hooks/queries/subscription.ts index 89ded91231..b0e40ef6c9 100644 --- a/apps/sim/hooks/queries/subscription.ts +++ b/apps/sim/hooks/queries/subscription.ts @@ -28,6 +28,8 @@ async function fetchSubscriptionData(includeOrg = false) { interface UseSubscriptionDataOptions { /** Include organization membership and role data */ includeOrg?: boolean + /** Whether to enable the query (defaults to true) */ + enabled?: boolean } /** @@ -35,13 +37,14 @@ interface UseSubscriptionDataOptions { * @param options - Optional configuration */ export function useSubscriptionData(options: UseSubscriptionDataOptions = {}) { - const { includeOrg = false } = options + const { includeOrg = false, enabled = true } = options return useQuery({ queryKey: subscriptionKeys.user(includeOrg), queryFn: () => fetchSubscriptionData(includeOrg), staleTime: 30 * 1000, placeholderData: keepPreviousData, + enabled, }) } @@ -58,17 +61,25 @@ async function fetchUsageLimitData() { return response.json() } +interface UseUsageLimitDataOptions { + /** Whether to enable the query (defaults to true) */ + enabled?: boolean +} + /** * Hook to fetch usage limit metadata * Returns: currentLimit, minimumLimit, canEdit, plan, updatedAt * Use this for editing usage limits, not for displaying current usage */ -export function useUsageLimitData() { +export function useUsageLimitData(options: UseUsageLimitDataOptions = {}) { + const { enabled = true } = options + return useQuery({ queryKey: subscriptionKeys.usage(), queryFn: fetchUsageLimitData, staleTime: 30 * 1000, placeholderData: keepPreviousData, + enabled, }) } diff --git a/apps/sim/hooks/use-code-viewer.ts b/apps/sim/hooks/use-code-viewer.ts new file mode 100644 index 0000000000..52d0300970 --- /dev/null +++ b/apps/sim/hooks/use-code-viewer.ts @@ -0,0 +1,155 @@ +'use client' + +import { useCallback, useEffect, useRef, useState } from 'react' + +interface 
UseCodeViewerFeaturesOptions { + /** Reference to the content container for scroll-to-match functionality */ + contentRef?: React.RefObject + /** Initial wrap text state (ignored if externalWrapText is provided) */ + initialWrapText?: boolean + /** External wrap text state (e.g., from Zustand store) */ + externalWrapText?: boolean + /** External setter for wrap text (required if externalWrapText is provided) */ + onWrapTextChange?: (wrap: boolean) => void + /** Callback when escape is pressed (optional, for custom handling) */ + onEscape?: () => void +} + +interface UseCodeViewerFeaturesReturn { + wrapText: boolean + setWrapText: (wrap: boolean) => void + toggleWrapText: () => void + + isSearchActive: boolean + searchQuery: string + setSearchQuery: (query: string) => void + matchCount: number + currentMatchIndex: number + activateSearch: () => void + closeSearch: () => void + goToNextMatch: () => void + goToPreviousMatch: () => void + handleMatchCountChange: (count: number) => void + searchInputRef: React.RefObject +} + +/** + * Reusable hook for Code.Viewer features: search and wrap text functionality. + * Supports both internal state and external state (e.g., from Zustand) for wrapText. + */ +export function useCodeViewerFeatures( + options: UseCodeViewerFeaturesOptions = {} +): UseCodeViewerFeaturesReturn { + const { + contentRef, + initialWrapText = true, + externalWrapText, + onWrapTextChange, + onEscape, + } = options + + // Use external state if provided, otherwise use internal state + const [internalWrapText, setInternalWrapText] = useState(initialWrapText) + const wrapText = externalWrapText !== undefined ? externalWrapText : internalWrapText + const setWrapText = onWrapTextChange ?? 
setInternalWrapText + + const [isSearchActive, setIsSearchActive] = useState(false) + const [searchQuery, setSearchQuery] = useState('') + const [matchCount, setMatchCount] = useState(0) + const [currentMatchIndex, setCurrentMatchIndex] = useState(0) + const searchInputRef = useRef(null) + + const toggleWrapText = useCallback(() => { + setWrapText(!wrapText) + }, [wrapText, setWrapText]) + + const activateSearch = useCallback(() => { + setIsSearchActive(true) + setTimeout(() => { + searchInputRef.current?.focus() + }, 0) + }, []) + + const closeSearch = useCallback(() => { + setIsSearchActive(false) + setSearchQuery('') + setMatchCount(0) + setCurrentMatchIndex(0) + }, []) + + const goToNextMatch = useCallback(() => { + if (matchCount === 0) return + setCurrentMatchIndex((prev) => (prev + 1) % matchCount) + }, [matchCount]) + + const goToPreviousMatch = useCallback(() => { + if (matchCount === 0) return + setCurrentMatchIndex((prev) => (prev - 1 + matchCount) % matchCount) + }, [matchCount]) + + const handleMatchCountChange = useCallback((count: number) => { + setMatchCount(count) + setCurrentMatchIndex(0) + }, []) + + useEffect(() => { + const handleKeyDown = (e: KeyboardEvent) => { + if (e.key === 'Escape' && isSearchActive) { + e.preventDefault() + closeSearch() + onEscape?.() + } + } + + window.addEventListener('keydown', handleKeyDown) + return () => window.removeEventListener('keydown', handleKeyDown) + }, [isSearchActive, closeSearch, onEscape]) + + useEffect(() => { + const handleKeyDown = (e: KeyboardEvent) => { + if (!isSearchActive) return + + const isSearchInputFocused = document.activeElement === searchInputRef.current + + if (e.key === 'Enter' && isSearchInputFocused && matchCount > 0) { + e.preventDefault() + if (e.shiftKey) { + goToPreviousMatch() + } else { + goToNextMatch() + } + } + } + + window.addEventListener('keydown', handleKeyDown) + return () => window.removeEventListener('keydown', handleKeyDown) + }, [isSearchActive, matchCount, 
goToNextMatch, goToPreviousMatch]) + + useEffect(() => { + if (!isSearchActive || matchCount === 0 || !contentRef?.current) return + + const matchElements = contentRef.current.querySelectorAll('[data-search-match]') + const currentElement = matchElements[currentMatchIndex] + + if (currentElement) { + currentElement.scrollIntoView({ block: 'center' }) + } + }, [currentMatchIndex, isSearchActive, matchCount, contentRef]) + + return { + wrapText, + setWrapText, + toggleWrapText, + isSearchActive, + searchQuery, + setSearchQuery, + matchCount, + currentMatchIndex, + activateSearch, + closeSearch, + goToNextMatch, + goToPreviousMatch, + handleMatchCountChange, + searchInputRef, + } +} diff --git a/apps/sim/hooks/use-collaborative-workflow.ts b/apps/sim/hooks/use-collaborative-workflow.ts index 4df0e00f40..ba6fda4e1c 100644 --- a/apps/sim/hooks/use-collaborative-workflow.ts +++ b/apps/sim/hooks/use-collaborative-workflow.ts @@ -6,6 +6,17 @@ import { TriggerUtils } from '@/lib/workflows/triggers/triggers' import { useSocket } from '@/app/workspace/providers/socket-provider' import { getBlock } from '@/blocks' import { useUndoRedo } from '@/hooks/use-undo-redo' +import { + BLOCK_OPERATIONS, + BLOCKS_OPERATIONS, + EDGE_OPERATIONS, + EDGES_OPERATIONS, + OPERATION_TARGETS, + SUBBLOCK_OPERATIONS, + SUBFLOW_OPERATIONS, + VARIABLE_OPERATIONS, + WORKFLOW_OPERATIONS, +} from '@/socket/constants' import { useNotificationStore } from '@/stores/notifications' import { registerEmitFunctions, useOperationQueue } from '@/stores/operation-queue/store' import { usePanelEditorStore } from '@/stores/panel/editor/store' @@ -20,8 +31,6 @@ import type { BlockState, Loop, Parallel, Position } from '@/stores/workflows/wo const logger = createLogger('CollaborativeWorkflow') -const WEBHOOK_SUBBLOCK_FIELDS = ['webhookId', 'triggerPath'] - export function useCollaborativeWorkflow() { const undoRedo = useUndoRedo() const isUndoRedoInProgress = useRef(false) @@ -33,7 +42,7 @@ export function 
useCollaborativeWorkflow() { const { blockId, before, after } = e.detail || {} if (!blockId || !before || !after) return if (isUndoRedoInProgress.current) return - undoRedo.recordMove(blockId, before, after) + undoRedo.recordBatchMoveBlocks([{ blockId, before, after }]) } const parentUpdateHandler = (e: any) => { @@ -197,9 +206,9 @@ export function useCollaborativeWorkflow() { isApplyingRemoteChange.current = true try { - if (target === 'block') { + if (target === OPERATION_TARGETS.BLOCK) { switch (operation) { - case 'update-position': { + case BLOCK_OPERATIONS.UPDATE_POSITION: { const blockId = payload.id if (!data.timestamp) { @@ -227,22 +236,22 @@ export function useCollaborativeWorkflow() { } break } - case 'update-name': + case BLOCK_OPERATIONS.UPDATE_NAME: workflowStore.updateBlockName(payload.id, payload.name) break - case 'toggle-enabled': + case BLOCK_OPERATIONS.TOGGLE_ENABLED: workflowStore.toggleBlockEnabled(payload.id) break - case 'update-parent': + case BLOCK_OPERATIONS.UPDATE_PARENT: workflowStore.updateParentId(payload.id, payload.parentId, payload.extent) break - case 'update-advanced-mode': + case BLOCK_OPERATIONS.UPDATE_ADVANCED_MODE: workflowStore.setBlockAdvancedMode(payload.id, payload.advancedMode) break - case 'update-trigger-mode': + case BLOCK_OPERATIONS.UPDATE_TRIGGER_MODE: workflowStore.setBlockTriggerMode(payload.id, payload.triggerMode) break - case 'toggle-handles': { + case BLOCK_OPERATIONS.TOGGLE_HANDLES: { const currentBlock = workflowStore.blocks[payload.id] if (currentBlock && currentBlock.horizontalHandles !== payload.horizontalHandles) { workflowStore.toggleBlockHandles(payload.id) @@ -250,9 +259,9 @@ export function useCollaborativeWorkflow() { break } } - } else if (target === 'blocks') { + } else if (target === OPERATION_TARGETS.BLOCKS) { switch (operation) { - case 'batch-update-positions': { + case BLOCKS_OPERATIONS.BATCH_UPDATE_POSITIONS: { const { updates } = payload if (Array.isArray(updates)) { updates.forEach(({ id, 
position }: { id: string; position: Position }) => { @@ -264,12 +273,12 @@ export function useCollaborativeWorkflow() { break } } - } else if (target === 'edge') { + } else if (target === OPERATION_TARGETS.EDGE) { switch (operation) { - case 'add': + case EDGE_OPERATIONS.ADD: workflowStore.addEdge(payload as Edge) break - case 'remove': { + case EDGE_OPERATIONS.REMOVE: { workflowStore.removeEdge(payload.id) const updatedBlocks = useWorkflowStore.getState().blocks @@ -290,9 +299,44 @@ export function useCollaborativeWorkflow() { break } } - } else if (target === 'subflow') { + } else if (target === OPERATION_TARGETS.EDGES) { + switch (operation) { + case EDGES_OPERATIONS.BATCH_REMOVE_EDGES: { + const { ids } = payload + if (Array.isArray(ids)) { + ids.forEach((id: string) => { + workflowStore.removeEdge(id) + }) + + const updatedBlocks = useWorkflowStore.getState().blocks + const updatedEdges = useWorkflowStore.getState().edges + const graph = { + blocksById: updatedBlocks, + edgesById: Object.fromEntries(updatedEdges.map((e) => [e.id, e])), + } + + const undoRedoStore = useUndoRedoStore.getState() + const stackKeys = Object.keys(undoRedoStore.stacks) + stackKeys.forEach((key) => { + const [wfId, uId] = key.split(':') + if (wfId === activeWorkflowId) { + undoRedoStore.pruneInvalidEntries(wfId, uId, graph) + } + }) + } + break + } + case EDGES_OPERATIONS.BATCH_ADD_EDGES: { + const { edges } = payload + if (Array.isArray(edges)) { + edges.forEach((edge: Edge) => workflowStore.addEdge(edge)) + } + break + } + } + } else if (target === OPERATION_TARGETS.SUBFLOW) { switch (operation) { - case 'update': + case SUBFLOW_OPERATIONS.UPDATE: // Handle subflow configuration updates (loop/parallel type changes, etc.) 
if (payload.type === 'loop') { const { config } = payload @@ -325,9 +369,9 @@ export function useCollaborativeWorkflow() { } break } - } else if (target === 'variable') { + } else if (target === OPERATION_TARGETS.VARIABLE) { switch (operation) { - case 'add': + case VARIABLE_OPERATIONS.ADD: variablesStore.addVariable( { workflowId: payload.workflowId, @@ -338,7 +382,7 @@ export function useCollaborativeWorkflow() { payload.id ) break - case 'variable-update': + case VARIABLE_OPERATIONS.UPDATE: if (payload.field === 'name') { variablesStore.updateVariable(payload.variableId, { name: payload.value }) } else if (payload.field === 'value') { @@ -347,13 +391,13 @@ export function useCollaborativeWorkflow() { variablesStore.updateVariable(payload.variableId, { type: payload.value }) } break - case 'remove': + case VARIABLE_OPERATIONS.REMOVE: variablesStore.deleteVariable(payload.variableId) break } - } else if (target === 'workflow') { + } else if (target === OPERATION_TARGETS.WORKFLOW) { switch (operation) { - case 'replace-state': + case WORKFLOW_OPERATIONS.REPLACE_STATE: if (payload.state) { logger.info('Received workflow state replacement from remote user', { userId, @@ -386,9 +430,9 @@ export function useCollaborativeWorkflow() { } } - if (target === 'blocks') { + if (target === OPERATION_TARGETS.BLOCKS) { switch (operation) { - case 'batch-add-blocks': { + case BLOCKS_OPERATIONS.BATCH_ADD_BLOCKS: { const { blocks, edges, @@ -456,7 +500,7 @@ export function useCollaborativeWorkflow() { logger.info('Successfully applied batch-add-blocks from remote user') break } - case 'batch-remove-blocks': { + case BLOCKS_OPERATIONS.BATCH_REMOVE_BLOCKS: { const { ids } = payload logger.info('Received batch-remove-blocks from remote user', { userId, @@ -722,7 +766,12 @@ export function useCollaborativeWorkflow() { ) const collaborativeBatchUpdatePositions = useCallback( - (updates: Array<{ id: string; position: Position }>) => { + ( + updates: Array<{ id: string; position: Position 
}>, + options?: { + previousPositions?: Map + } + ) => { if (!isInActiveRoom()) { logger.debug('Skipping batch position update - not in active workflow') return @@ -735,8 +784,8 @@ export function useCollaborativeWorkflow() { addToQueue({ id: operationId, operation: { - operation: 'batch-update-positions', - target: 'blocks', + operation: BLOCKS_OPERATIONS.BATCH_UPDATE_POSITIONS, + target: OPERATION_TARGETS.BLOCKS, payload: { updates }, }, workflowId: activeWorkflowId || '', @@ -746,8 +795,31 @@ export function useCollaborativeWorkflow() { updates.forEach(({ id, position }) => { workflowStore.updateBlockPosition(id, position) }) + + if (options?.previousPositions && options.previousPositions.size > 0) { + const moves = updates + .filter((u) => options.previousPositions!.has(u.id)) + .map((u) => { + const prev = options.previousPositions!.get(u.id)! + const block = workflowStore.blocks[u.id] + return { + blockId: u.id, + before: prev, + after: { + x: u.position.x, + y: u.position.y, + parentId: block?.data?.parentId, + }, + } + }) + .filter((m) => m.before.x !== m.after.x || m.before.y !== m.after.y) + + if (moves.length > 0) { + undoRedo.recordBatchMoveBlocks(moves) + } + } }, - [addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, workflowStore] + [addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, workflowStore, undoRedo] ) const collaborativeUpdateBlockName = useCallback( @@ -781,65 +853,169 @@ export function useCollaborativeWorkflow() { return { success: false, error: `Block name "${trimmedName}" already exists` } } - executeQueuedOperation('update-name', 'block', { id, name: trimmedName }, () => { - const result = workflowStore.updateBlockName(id, trimmedName) + executeQueuedOperation( + BLOCK_OPERATIONS.UPDATE_NAME, + OPERATION_TARGETS.BLOCK, + { id, name: trimmedName }, + () => { + const result = workflowStore.updateBlockName(id, trimmedName) - if (result.success && result.changedSubblocks.length > 0) { - logger.info('Emitting 
cascaded subblock updates from block rename', { - blockId: id, - newName: trimmedName, - updateCount: result.changedSubblocks.length, - }) + if (result.success && result.changedSubblocks.length > 0) { + logger.info('Emitting cascaded subblock updates from block rename', { + blockId: id, + newName: trimmedName, + updateCount: result.changedSubblocks.length, + }) - result.changedSubblocks.forEach( - ({ - blockId, - subBlockId, - newValue, - }: { - blockId: string - subBlockId: string - newValue: any - }) => { - const operationId = crypto.randomUUID() - addToQueue({ - id: operationId, - operation: { - operation: 'subblock-update', - target: 'subblock', - payload: { blockId, subblockId: subBlockId, value: newValue }, - }, - workflowId: activeWorkflowId || '', - userId: session?.user?.id || 'unknown', - }) - } - ) + result.changedSubblocks.forEach( + ({ + blockId, + subBlockId, + newValue, + }: { + blockId: string + subBlockId: string + newValue: any + }) => { + const operationId = crypto.randomUUID() + addToQueue({ + id: operationId, + operation: { + operation: SUBBLOCK_OPERATIONS.UPDATE, + target: OPERATION_TARGETS.SUBBLOCK, + payload: { blockId, subblockId: subBlockId, value: newValue }, + }, + workflowId: activeWorkflowId || '', + userId: session?.user?.id || 'unknown', + }) + } + ) + } } - }) + ) return { success: true } }, [executeQueuedOperation, workflowStore, addToQueue, activeWorkflowId, session?.user?.id] ) - const collaborativeToggleBlockEnabled = useCallback( - (id: string) => { - executeQueuedOperation('toggle-enabled', 'block', { id }, () => + const collaborativeBatchToggleBlockEnabled = useCallback( + (ids: string[]) => { + if (ids.length === 0) return + + const previousStates: Record = {} + const validIds: string[] = [] + + for (const id of ids) { + const block = workflowStore.blocks[id] + if (block) { + previousStates[id] = block.enabled + validIds.push(id) + } + } + + if (validIds.length === 0) return + + const operationId = crypto.randomUUID() + + 
addToQueue({ + id: operationId, + operation: { + operation: BLOCKS_OPERATIONS.BATCH_TOGGLE_ENABLED, + target: OPERATION_TARGETS.BLOCKS, + payload: { blockIds: validIds, previousStates }, + }, + workflowId: activeWorkflowId || '', + userId: session?.user?.id || 'unknown', + }) + + for (const id of validIds) { workflowStore.toggleBlockEnabled(id) - ) + } + + undoRedo.recordBatchToggleEnabled(validIds, previousStates) }, - [executeQueuedOperation, workflowStore] + [addToQueue, activeWorkflowId, session?.user?.id, workflowStore, undoRedo] ) const collaborativeUpdateParentId = useCallback( (id: string, parentId: string, extent: 'parent') => { - executeQueuedOperation('update-parent', 'block', { id, parentId, extent }, () => - workflowStore.updateParentId(id, parentId, extent) + executeQueuedOperation( + BLOCK_OPERATIONS.UPDATE_PARENT, + OPERATION_TARGETS.BLOCK, + { id, parentId, extent }, + () => workflowStore.updateParentId(id, parentId, extent) ) }, [executeQueuedOperation, workflowStore] ) + const collaborativeBatchUpdateParent = useCallback( + ( + updates: Array<{ + blockId: string + newParentId: string | null + newPosition: { x: number; y: number } + affectedEdges: Edge[] + }> + ) => { + if (!isInActiveRoom()) { + logger.debug('Skipping batch update parent - not in active workflow') + return + } + + if (updates.length === 0) return + + const batchUpdates = updates.map((u) => { + const block = workflowStore.blocks[u.blockId] + const oldParentId = block?.data?.parentId + const oldPosition = block?.position || { x: 0, y: 0 } + + return { + blockId: u.blockId, + oldParentId, + newParentId: u.newParentId || undefined, + oldPosition, + newPosition: u.newPosition, + affectedEdges: u.affectedEdges, + } + }) + + for (const update of updates) { + if (update.affectedEdges.length > 0) { + update.affectedEdges.forEach((e) => workflowStore.removeEdge(e.id)) + } + workflowStore.updateBlockPosition(update.blockId, update.newPosition) + if (update.newParentId) { + 
workflowStore.updateParentId(update.blockId, update.newParentId, 'parent') + } + } + + undoRedo.recordBatchUpdateParent(batchUpdates) + + const operationId = crypto.randomUUID() + addToQueue({ + id: operationId, + operation: { + operation: BLOCKS_OPERATIONS.BATCH_UPDATE_PARENT, + target: OPERATION_TARGETS.BLOCKS, + payload: { + updates: batchUpdates.map((u) => ({ + id: u.blockId, + parentId: u.newParentId || '', + position: u.newPosition, + })), + }, + }, + workflowId: activeWorkflowId || '', + userId: session?.user?.id || 'unknown', + }) + + logger.debug('Batch updated parent for blocks', { updateCount: updates.length }) + }, + [isInActiveRoom, workflowStore, undoRedo, addToQueue, activeWorkflowId, session?.user?.id] + ) + const collaborativeToggleBlockAdvancedMode = useCallback( (id: string) => { const currentBlock = workflowStore.blocks[id] @@ -848,8 +1024,8 @@ export function useCollaborativeWorkflow() { const newAdvancedMode = !currentBlock.advancedMode executeQueuedOperation( - 'update-advanced-mode', - 'block', + BLOCK_OPERATIONS.UPDATE_ADVANCED_MODE, + OPERATION_TARGETS.BLOCK, { id, advancedMode: newAdvancedMode }, () => workflowStore.toggleBlockAdvancedMode(id) ) @@ -879,8 +1055,8 @@ export function useCollaborativeWorkflow() { } executeQueuedOperation( - 'update-trigger-mode', - 'block', + BLOCK_OPERATIONS.UPDATE_TRIGGER_MODE, + OPERATION_TARGETS.BLOCK, { id, triggerMode: newTriggerMode }, () => workflowStore.toggleBlockTriggerMode(id) ) @@ -888,27 +1064,50 @@ export function useCollaborativeWorkflow() { [executeQueuedOperation, workflowStore] ) - const collaborativeToggleBlockHandles = useCallback( - (id: string) => { - const currentBlock = workflowStore.blocks[id] - if (!currentBlock) return + const collaborativeBatchToggleBlockHandles = useCallback( + (ids: string[]) => { + if (ids.length === 0) return - const newHorizontalHandles = !currentBlock.horizontalHandles + const previousStates: Record = {} + const validIds: string[] = [] - 
executeQueuedOperation( - 'toggle-handles', - 'block', - { id, horizontalHandles: newHorizontalHandles }, - () => workflowStore.toggleBlockHandles(id) - ) + for (const id of ids) { + const block = workflowStore.blocks[id] + if (block) { + previousStates[id] = block.horizontalHandles ?? false + validIds.push(id) + } + } + + if (validIds.length === 0) return + + const operationId = crypto.randomUUID() + + addToQueue({ + id: operationId, + operation: { + operation: BLOCKS_OPERATIONS.BATCH_TOGGLE_HANDLES, + target: OPERATION_TARGETS.BLOCKS, + payload: { blockIds: validIds, previousStates }, + }, + workflowId: activeWorkflowId || '', + userId: session?.user?.id || 'unknown', + }) + + for (const id of validIds) { + workflowStore.toggleBlockHandles(id) + } + + undoRedo.recordBatchToggleHandles(validIds, previousStates) }, - [executeQueuedOperation, workflowStore] + [addToQueue, activeWorkflowId, session?.user?.id, workflowStore, undoRedo] ) const collaborativeAddEdge = useCallback( (edge: Edge) => { - executeQueuedOperation('add', 'edge', edge, () => workflowStore.addEdge(edge)) - // Only record edge addition if it's not part of a parent update operation + executeQueuedOperation(EDGE_OPERATIONS.ADD, OPERATION_TARGETS.EDGE, edge, () => + workflowStore.addEdge(edge) + ) if (!skipEdgeRecording.current) { undoRedo.recordAddEdge(edge.id) } @@ -920,13 +1119,11 @@ export function useCollaborativeWorkflow() { (edgeId: string) => { const edge = workflowStore.edges.find((e) => e.id === edgeId) - // Skip if edge doesn't exist (already removed during cascade deletion) if (!edge) { logger.debug('Edge already removed, skipping operation', { edgeId }) return } - // Check if the edge's source and target blocks still exist const sourceExists = workflowStore.blocks[edge.source] const targetExists = workflowStore.blocks[edge.target] @@ -939,23 +1136,75 @@ export function useCollaborativeWorkflow() { return } - // Only record edge removal if it's not part of a parent update operation if 
(!skipEdgeRecording.current) { - undoRedo.recordRemoveEdge(edgeId, edge) + undoRedo.recordBatchRemoveEdges([edge]) } - executeQueuedOperation('remove', 'edge', { id: edgeId }, () => + executeQueuedOperation(EDGE_OPERATIONS.REMOVE, OPERATION_TARGETS.EDGE, { id: edgeId }, () => workflowStore.removeEdge(edgeId) ) }, [executeQueuedOperation, workflowStore, undoRedo] ) + const collaborativeBatchRemoveEdges = useCallback( + (edgeIds: string[], options?: { skipUndoRedo?: boolean }) => { + if (!isInActiveRoom()) { + logger.debug('Skipping batch remove edges - not in active workflow') + return false + } + + if (edgeIds.length === 0) return false + + const edgeSnapshots: Edge[] = [] + const validEdgeIds: string[] = [] + + for (const edgeId of edgeIds) { + const edge = workflowStore.edges.find((e) => e.id === edgeId) + if (edge) { + const sourceExists = workflowStore.blocks[edge.source] + const targetExists = workflowStore.blocks[edge.target] + if (sourceExists && targetExists) { + edgeSnapshots.push(edge) + validEdgeIds.push(edgeId) + } + } + } + + if (validEdgeIds.length === 0) { + logger.debug('No valid edges to remove') + return false + } + + const operationId = crypto.randomUUID() + + addToQueue({ + id: operationId, + operation: { + operation: EDGES_OPERATIONS.BATCH_REMOVE_EDGES, + target: OPERATION_TARGETS.EDGES, + payload: { ids: validEdgeIds }, + }, + workflowId: activeWorkflowId || '', + userId: session?.user?.id || 'unknown', + }) + + validEdgeIds.forEach((id) => workflowStore.removeEdge(id)) + + if (!options?.skipUndoRedo && edgeSnapshots.length > 0) { + undoRedo.recordBatchRemoveEdges(edgeSnapshots) + } + + logger.info('Batch removed edges', { count: validEdgeIds.length }) + return true + }, + [isInActiveRoom, workflowStore, addToQueue, activeWorkflowId, session, undoRedo] + ) + const collaborativeSetSubblockValue = useCallback( (blockId: string, subblockId: string, value: any, options?: { _visited?: Set }) => { if (isApplyingRemoteChange.current) return - // Skip 
socket operations when viewing baseline diff if (isBaselineDiffView) { logger.debug('Skipping collaborative subblock update while viewing baseline diff') return @@ -971,28 +1220,23 @@ export function useCollaborativeWorkflow() { return } - // Generate operation ID for queue tracking const operationId = crypto.randomUUID() - // Get fresh activeWorkflowId from store to avoid stale closure const currentActiveWorkflowId = useWorkflowRegistry.getState().activeWorkflowId - // Add to queue for retry mechanism addToQueue({ id: operationId, operation: { - operation: 'subblock-update', - target: 'subblock', + operation: SUBBLOCK_OPERATIONS.UPDATE, + target: OPERATION_TARGETS.SUBBLOCK, payload: { blockId, subblockId, value }, }, workflowId: currentActiveWorkflowId || '', userId: session?.user?.id || 'unknown', }) - // Apply locally first (immediate UI feedback) subBlockStore.setValue(blockId, subblockId, value) - // Declarative clearing: clear sub-blocks that depend on this subblockId try { const visited = options?._visited || new Set() if (visited.has(subblockId)) return @@ -1004,9 +1248,7 @@ export function useCollaborativeWorkflow() { (sb: any) => Array.isArray(sb.dependsOn) && sb.dependsOn.includes(subblockId) ) for (const dep of dependents) { - // Skip clearing if the dependent is the same field if (!dep?.id || dep.id === subblockId) continue - // Cascade using the same collaborative path so it emits and further cascades collaborativeSetSubblockValue(blockId, dep.id, '', { _visited: visited }) } } @@ -1049,8 +1291,8 @@ export function useCollaborativeWorkflow() { addToQueue({ id: operationId, operation: { - operation: 'subblock-update', - target: 'subblock', + operation: SUBBLOCK_OPERATIONS.UPDATE, + target: OPERATION_TARGETS.SUBBLOCK, payload: { blockId, subblockId, value }, }, workflowId: activeWorkflowId || '', @@ -1096,12 +1338,17 @@ export function useCollaborativeWorkflow() { doWhileCondition: existingDoWhileCondition ?? 
'', } - executeQueuedOperation('update', 'subflow', { id: loopId, type: 'loop', config }, () => { - workflowStore.updateLoopType(loopId, loopType) - workflowStore.setLoopForEachItems(loopId, existingForEachItems ?? '') - workflowStore.setLoopWhileCondition(loopId, existingWhileCondition ?? '') - workflowStore.setLoopDoWhileCondition(loopId, existingDoWhileCondition ?? '') - }) + executeQueuedOperation( + SUBFLOW_OPERATIONS.UPDATE, + OPERATION_TARGETS.SUBFLOW, + { id: loopId, type: 'loop', config }, + () => { + workflowStore.updateLoopType(loopId, loopType) + workflowStore.setLoopForEachItems(loopId, existingForEachItems ?? '') + workflowStore.setLoopWhileCondition(loopId, existingWhileCondition ?? '') + workflowStore.setLoopDoWhileCondition(loopId, existingDoWhileCondition ?? '') + } + ) }, [executeQueuedOperation, workflowStore] ) @@ -1134,8 +1381,8 @@ export function useCollaborativeWorkflow() { } executeQueuedOperation( - 'update', - 'subflow', + SUBFLOW_OPERATIONS.UPDATE, + OPERATION_TARGETS.SUBFLOW, { id: parallelId, type: 'parallel', config }, () => { workflowStore.updateParallelType(parallelId, parallelType) @@ -1169,8 +1416,11 @@ export function useCollaborativeWorkflow() { forEachItems: currentCollection, } - executeQueuedOperation('update', 'subflow', { id: nodeId, type: 'loop', config }, () => - workflowStore.updateLoopCount(nodeId, count) + executeQueuedOperation( + SUBFLOW_OPERATIONS.UPDATE, + OPERATION_TARGETS.SUBFLOW, + { id: nodeId, type: 'loop', config }, + () => workflowStore.updateLoopCount(nodeId, count) ) } else { const currentDistribution = currentBlock.data?.collection || '' @@ -1184,8 +1434,11 @@ export function useCollaborativeWorkflow() { parallelType: currentParallelType, } - executeQueuedOperation('update', 'subflow', { id: nodeId, type: 'parallel', config }, () => - workflowStore.updateParallelCount(nodeId, count) + executeQueuedOperation( + SUBFLOW_OPERATIONS.UPDATE, + OPERATION_TARGETS.SUBFLOW, + { id: nodeId, type: 'parallel', config 
}, + () => workflowStore.updateParallelCount(nodeId, count) ) } }, @@ -1230,11 +1483,16 @@ export function useCollaborativeWorkflow() { doWhileCondition: nextDoWhileCondition ?? '', } - executeQueuedOperation('update', 'subflow', { id: nodeId, type: 'loop', config }, () => { - workflowStore.setLoopForEachItems(nodeId, nextForEachItems ?? '') - workflowStore.setLoopWhileCondition(nodeId, nextWhileCondition ?? '') - workflowStore.setLoopDoWhileCondition(nodeId, nextDoWhileCondition ?? '') - }) + executeQueuedOperation( + SUBFLOW_OPERATIONS.UPDATE, + OPERATION_TARGETS.SUBFLOW, + { id: nodeId, type: 'loop', config }, + () => { + workflowStore.setLoopForEachItems(nodeId, nextForEachItems ?? '') + workflowStore.setLoopWhileCondition(nodeId, nextWhileCondition ?? '') + workflowStore.setLoopDoWhileCondition(nodeId, nextDoWhileCondition ?? '') + } + ) } else { const currentCount = currentBlock.data?.count || 5 const currentParallelType = currentBlock.data?.parallelType || 'count' @@ -1247,8 +1505,11 @@ export function useCollaborativeWorkflow() { parallelType: currentParallelType, } - executeQueuedOperation('update', 'subflow', { id: nodeId, type: 'parallel', config }, () => - workflowStore.updateParallelCollection(nodeId, collection) + executeQueuedOperation( + SUBFLOW_OPERATIONS.UPDATE, + OPERATION_TARGETS.SUBFLOW, + { id: nodeId, type: 'parallel', config }, + () => workflowStore.updateParallelCollection(nodeId, collection) ) } }, @@ -1257,15 +1518,20 @@ export function useCollaborativeWorkflow() { const collaborativeUpdateVariable = useCallback( (variableId: string, field: 'name' | 'value' | 'type', value: any) => { - executeQueuedOperation('variable-update', 'variable', { variableId, field, value }, () => { - if (field === 'name') { - variablesStore.updateVariable(variableId, { name: value }) - } else if (field === 'value') { - variablesStore.updateVariable(variableId, { value }) - } else if (field === 'type') { - variablesStore.updateVariable(variableId, { type: value 
}) + executeQueuedOperation( + VARIABLE_OPERATIONS.UPDATE, + OPERATION_TARGETS.VARIABLE, + { variableId, field, value }, + () => { + if (field === 'name') { + variablesStore.updateVariable(variableId, { name: value }) + } else if (field === 'value') { + variablesStore.updateVariable(variableId, { value }) + } else if (field === 'type') { + variablesStore.updateVariable(variableId, { type: value }) + } } - }) + ) }, [executeQueuedOperation, variablesStore] ) @@ -1287,7 +1553,12 @@ export function useCollaborativeWorkflow() { // Queue operation with processed name for server & other clients // Empty callback because local store is already updated above - executeQueuedOperation('add', 'variable', payloadWithProcessedName, () => {}) + executeQueuedOperation( + VARIABLE_OPERATIONS.ADD, + OPERATION_TARGETS.VARIABLE, + payloadWithProcessedName, + () => {} + ) } return id @@ -1299,9 +1570,14 @@ export function useCollaborativeWorkflow() { (variableId: string) => { cancelOperationsForVariable(variableId) - executeQueuedOperation('remove', 'variable', { variableId }, () => { - variablesStore.deleteVariable(variableId) - }) + executeQueuedOperation( + VARIABLE_OPERATIONS.REMOVE, + OPERATION_TARGETS.VARIABLE, + { variableId }, + () => { + variablesStore.deleteVariable(variableId) + } + ) }, [executeQueuedOperation, variablesStore, cancelOperationsForVariable] ) @@ -1337,8 +1613,8 @@ export function useCollaborativeWorkflow() { addToQueue({ id: operationId, operation: { - operation: 'batch-add-blocks', - target: 'blocks', + operation: BLOCKS_OPERATIONS.BATCH_ADD_BLOCKS, + target: OPERATION_TARGETS.BLOCKS, payload: { blocks, edges, loops, parallels, subBlockValues }, }, workflowId: activeWorkflowId || '', @@ -1469,8 +1745,8 @@ export function useCollaborativeWorkflow() { addToQueue({ id: operationId, operation: { - operation: 'batch-remove-blocks', - target: 'blocks', + operation: BLOCKS_OPERATIONS.BATCH_REMOVE_BLOCKS, + target: OPERATION_TARGETS.BLOCKS, payload: { ids: 
Array.from(allBlocksToRemove) }, }, workflowId: activeWorkflowId || '', @@ -1512,15 +1788,17 @@ export function useCollaborativeWorkflow() { // Collaborative operations collaborativeBatchUpdatePositions, collaborativeUpdateBlockName, - collaborativeToggleBlockEnabled, + collaborativeBatchToggleBlockEnabled, collaborativeUpdateParentId, + collaborativeBatchUpdateParent, collaborativeToggleBlockAdvancedMode, collaborativeToggleBlockTriggerMode, - collaborativeToggleBlockHandles, + collaborativeBatchToggleBlockHandles, collaborativeBatchAddBlocks, collaborativeBatchRemoveBlocks, collaborativeAddEdge, collaborativeRemoveEdge, + collaborativeBatchRemoveEdges, collaborativeSetSubblockValue, collaborativeSetTagSelection, diff --git a/apps/sim/hooks/use-forwarded-ref.ts b/apps/sim/hooks/use-forwarded-ref.ts deleted file mode 100644 index 70bbc4ad37..0000000000 --- a/apps/sim/hooks/use-forwarded-ref.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { type MutableRefObject, useEffect, useRef } from 'react' - -/** - * A hook that handles forwarded refs and returns a mutable ref object - * Useful for components that need both a forwarded ref and a local ref - * @param forwardedRef The forwarded ref from React.forwardRef - * @returns A mutable ref object that can be used locally - */ -export function useForwardedRef( - forwardedRef: React.ForwardedRef -): MutableRefObject { - const innerRef = useRef(null) - - useEffect(() => { - if (!forwardedRef) return - - if (typeof forwardedRef === 'function') { - forwardedRef(innerRef.current) - } else { - forwardedRef.current = innerRef.current - } - }, [forwardedRef]) - - return innerRef -} diff --git a/apps/sim/hooks/use-subscription-state.ts b/apps/sim/hooks/use-subscription-state.ts deleted file mode 100644 index 5bb52ad135..0000000000 --- a/apps/sim/hooks/use-subscription-state.ts +++ /dev/null @@ -1,217 +0,0 @@ -import { useCallback, useEffect, useState } from 'react' -import { createLogger } from '@sim/logger' -import { 
DEFAULT_FREE_CREDITS } from '@/lib/billing/constants' - -const logger = createLogger('useSubscriptionState') - -interface UsageData { - current: number - limit: number - percentUsed: number - isWarning: boolean - isExceeded: boolean - billingPeriodStart: Date | null - billingPeriodEnd: Date | null - lastPeriodCost: number -} - -interface SubscriptionState { - isPaid: boolean - isPro: boolean - isTeam: boolean - isEnterprise: boolean - plan: string - status: string | null - seats: number | null - metadata: any | null - usage: UsageData -} - -/** - * Consolidated hook for subscription state management - * Combines subscription status, features, and usage data - */ -export function useSubscriptionState() { - const [data, setData] = useState(null) - const [isLoading, setIsLoading] = useState(true) - const [error, setError] = useState(null) - - const fetchSubscriptionState = useCallback(async () => { - try { - setIsLoading(true) - setError(null) - - const response = await fetch('/api/billing?context=user') - - if (!response.ok) { - throw new Error(`HTTP error! status: ${response.status}`) - } - - const result = await response.json() - const subscriptionData = result.data - setData(subscriptionData) - } catch (error) { - const err = error instanceof Error ? error : new Error('Failed to fetch subscription state') - logger.error('Failed to fetch subscription state', { error }) - setError(err) - } finally { - setIsLoading(false) - } - }, []) - - useEffect(() => { - fetchSubscriptionState() - }, [fetchSubscriptionState]) - - const refetch = useCallback(() => { - return fetchSubscriptionState() - }, [fetchSubscriptionState]) - - return { - subscription: { - isPaid: data?.isPaid ?? false, - isPro: data?.isPro ?? false, - isTeam: data?.isTeam ?? false, - isEnterprise: data?.isEnterprise ?? false, - isFree: !(data?.isPaid ?? false), - plan: data?.plan ?? 
'free', - status: data?.status, - seats: data?.seats, - metadata: data?.metadata, - }, - - usage: { - current: data?.usage?.current ?? 0, - limit: data?.usage?.limit ?? DEFAULT_FREE_CREDITS, - percentUsed: data?.usage?.percentUsed ?? 0, - isWarning: data?.usage?.isWarning ?? false, - isExceeded: data?.usage?.isExceeded ?? false, - billingPeriodStart: data?.usage?.billingPeriodStart - ? new Date(data.usage.billingPeriodStart) - : null, - billingPeriodEnd: data?.usage?.billingPeriodEnd - ? new Date(data.usage.billingPeriodEnd) - : null, - lastPeriodCost: data?.usage?.lastPeriodCost ?? 0, - }, - - isLoading, - error, - refetch, - - isAtLeastPro: () => { - return data?.isPro || data?.isTeam || data?.isEnterprise || false - }, - - isAtLeastTeam: () => { - return data?.isTeam || data?.isEnterprise || false - }, - - canUpgrade: () => { - return data?.plan === 'free' || data?.plan === 'pro' - }, - - getBillingStatus: () => { - const usage = data?.usage - if (!usage) return 'unknown' - - if (usage.isExceeded) return 'exceeded' - if (usage.isWarning) return 'warning' - return 'ok' - }, - - getRemainingBudget: () => { - const usage = data?.usage - if (!usage) return 0 - return Math.max(0, usage.limit - usage.current) - }, - - getDaysRemainingInPeriod: () => { - const usage = data?.usage - if (!usage?.billingPeriodEnd) return null - - const now = new Date() - const endDate = new Date(usage.billingPeriodEnd) - const diffTime = endDate.getTime() - now.getTime() - const diffDays = Math.ceil(diffTime / (1000 * 60 * 60 * 24)) - - return Math.max(0, diffDays) - }, - } -} - -/** - * Hook for usage limit information with editing capabilities - */ -export function useUsageLimit() { - const [data, setData] = useState(null) - const [isLoading, setIsLoading] = useState(true) - const [error, setError] = useState(null) - - const fetchUsageLimit = useCallback(async () => { - try { - setIsLoading(true) - setError(null) - - const response = await fetch('/api/usage?context=user') - - if 
(!response.ok) { - throw new Error(`HTTP error! status: ${response.status}`) - } - - const limitData = await response.json() - setData(limitData) - } catch (error) { - const err = error instanceof Error ? error : new Error('Failed to fetch usage limit') - logger.error('Failed to fetch usage limit', { error }) - setError(err) - } finally { - setIsLoading(false) - } - }, []) - - useEffect(() => { - fetchUsageLimit() - }, [fetchUsageLimit]) - - const refetch = useCallback(() => { - return fetchUsageLimit() - }, [fetchUsageLimit]) - - const updateLimit = async (newLimit: number) => { - try { - const response = await fetch('/api/usage?context=user', { - method: 'PUT', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ limit: newLimit }), - }) - - if (!response.ok) { - const errorData = await response.json() - throw new Error(errorData.error || 'Failed to update usage limit') - } - - await refetch() - - return { success: true } - } catch (error) { - logger.error('Failed to update usage limit', { error, newLimit }) - throw error - } - } - - return { - currentLimit: data?.currentLimit ?? DEFAULT_FREE_CREDITS, - canEdit: data?.canEdit ?? false, - minimumLimit: data?.minimumLimit ?? DEFAULT_FREE_CREDITS, - plan: data?.plan ?? 'free', - setBy: data?.setBy, - updatedAt: data?.updatedAt ? 
new Date(data.updatedAt) : null, - updateLimit, - isLoading, - error, - refetch, - } -} diff --git a/apps/sim/hooks/use-undo-redo.ts b/apps/sim/hooks/use-undo-redo.ts index 33457cf390..740b50293b 100644 --- a/apps/sim/hooks/use-undo-redo.ts +++ b/apps/sim/hooks/use-undo-redo.ts @@ -3,14 +3,25 @@ import { createLogger } from '@sim/logger' import type { Edge } from 'reactflow' import { useSession } from '@/lib/auth/auth-client' import { enqueueReplaceWorkflowState } from '@/lib/workflows/operations/socket-operations' +import { + BLOCK_OPERATIONS, + BLOCKS_OPERATIONS, + EDGE_OPERATIONS, + EDGES_OPERATIONS, + OPERATION_TARGETS, + UNDO_REDO_OPERATIONS, +} from '@/socket/constants' import { useOperationQueue } from '@/stores/operation-queue/store' import { type BatchAddBlocksOperation, + type BatchAddEdgesOperation, + type BatchMoveBlocksOperation, type BatchRemoveBlocksOperation, + type BatchRemoveEdgesOperation, + type BatchToggleEnabledOperation, + type BatchToggleHandlesOperation, + type BatchUpdateParentOperation, createOperationEntry, - type MoveBlockOperation, - type Operation, - type RemoveEdgeOperation, runWithUndoRedoRecordingSuspended, type UpdateParentOperation, useUndoRedoStore, @@ -42,7 +53,7 @@ export function useUndoRedo() { const operation: BatchAddBlocksOperation = { id: crypto.randomUUID(), - type: 'batch-add-blocks', + type: UNDO_REDO_OPERATIONS.BATCH_ADD_BLOCKS, timestamp: Date.now(), workflowId: activeWorkflowId, userId, @@ -55,7 +66,7 @@ export function useUndoRedo() { const inverse: BatchRemoveBlocksOperation = { id: crypto.randomUUID(), - type: 'batch-remove-blocks', + type: UNDO_REDO_OPERATIONS.BATCH_REMOVE_BLOCKS, timestamp: Date.now(), workflowId: activeWorkflowId, userId, @@ -88,7 +99,7 @@ export function useUndoRedo() { const operation: BatchRemoveBlocksOperation = { id: crypto.randomUUID(), - type: 'batch-remove-blocks', + type: UNDO_REDO_OPERATIONS.BATCH_REMOVE_BLOCKS, timestamp: Date.now(), workflowId: activeWorkflowId, userId, @@ -101,7 
+112,7 @@ export function useUndoRedo() { const inverse: BatchAddBlocksOperation = { id: crypto.randomUUID(), - type: 'batch-add-blocks', + type: UNDO_REDO_OPERATIONS.BATCH_ADD_BLOCKS, timestamp: Date.now(), workflowId: activeWorkflowId, userId, @@ -128,25 +139,28 @@ export function useUndoRedo() { (edgeId: string) => { if (!activeWorkflowId) return - const operation: Operation = { + const edgeSnapshot = workflowStore.edges.find((e) => e.id === edgeId) + if (!edgeSnapshot) { + logger.warn('Edge not found when recording add edge', { edgeId }) + return + } + + const operation: BatchAddEdgesOperation = { id: crypto.randomUUID(), - type: 'add-edge', + type: UNDO_REDO_OPERATIONS.BATCH_ADD_EDGES, timestamp: Date.now(), workflowId: activeWorkflowId, userId, - data: { edgeId }, + data: { edgeSnapshots: [edgeSnapshot] }, } - const inverse: RemoveEdgeOperation = { + const inverse: BatchRemoveEdgesOperation = { id: crypto.randomUUID(), - type: 'remove-edge', + type: UNDO_REDO_OPERATIONS.BATCH_REMOVE_EDGES, timestamp: Date.now(), workflowId: activeWorkflowId, userId, - data: { - edgeId, - edgeSnapshot: workflowStore.edges.find((e) => e.id === edgeId) || null, - }, + data: { edgeSnapshots: [edgeSnapshot] }, } const entry = createOperationEntry(operation, inverse) @@ -157,77 +171,81 @@ export function useUndoRedo() { [activeWorkflowId, userId, workflowStore, undoRedoStore] ) - const recordRemoveEdge = useCallback( - (edgeId: string, edgeSnapshot: Edge) => { - if (!activeWorkflowId) return + const recordBatchRemoveEdges = useCallback( + (edgeSnapshots: Edge[]) => { + if (!activeWorkflowId || edgeSnapshots.length === 0) return - const operation: RemoveEdgeOperation = { + const operation: BatchRemoveEdgesOperation = { id: crypto.randomUUID(), - type: 'remove-edge', + type: UNDO_REDO_OPERATIONS.BATCH_REMOVE_EDGES, timestamp: Date.now(), workflowId: activeWorkflowId, userId, data: { - edgeId, - edgeSnapshot, + edgeSnapshots, }, } - const inverse: Operation = { + const inverse: 
BatchAddEdgesOperation = { id: crypto.randomUUID(), - type: 'add-edge', + type: UNDO_REDO_OPERATIONS.BATCH_ADD_EDGES, timestamp: Date.now(), workflowId: activeWorkflowId, userId, - data: { edgeId }, + data: { + edgeSnapshots, + }, } const entry = createOperationEntry(operation, inverse) undoRedoStore.push(activeWorkflowId, userId, entry) - logger.debug('Recorded remove edge', { edgeId, workflowId: activeWorkflowId }) + logger.debug('Recorded batch remove edges', { + edgeCount: edgeSnapshots.length, + workflowId: activeWorkflowId, + }) }, [activeWorkflowId, userId, undoRedoStore] ) - const recordMove = useCallback( + const recordBatchMoveBlocks = useCallback( ( - blockId: string, - before: { x: number; y: number; parentId?: string }, - after: { x: number; y: number; parentId?: string } + moves: Array<{ + blockId: string + before: { x: number; y: number; parentId?: string } + after: { x: number; y: number; parentId?: string } + }> ) => { - if (!activeWorkflowId) return + if (!activeWorkflowId || moves.length === 0) return - const operation: MoveBlockOperation = { + const operation: BatchMoveBlocksOperation = { id: crypto.randomUUID(), - type: 'move-block', + type: UNDO_REDO_OPERATIONS.BATCH_MOVE_BLOCKS, timestamp: Date.now(), workflowId: activeWorkflowId, userId, - data: { - blockId, - before, - after, - }, + data: { moves }, } - const inverse: MoveBlockOperation = { + const inverse: BatchMoveBlocksOperation = { id: crypto.randomUUID(), - type: 'move-block', + type: UNDO_REDO_OPERATIONS.BATCH_MOVE_BLOCKS, timestamp: Date.now(), workflowId: activeWorkflowId, userId, data: { - blockId, - before: after, - after: before, + moves: moves.map((m) => ({ + blockId: m.blockId, + before: m.after, + after: m.before, + })), }, } const entry = createOperationEntry(operation, inverse) undoRedoStore.push(activeWorkflowId, userId, entry) - logger.debug('Recorded move', { blockId, from: before, to: after }) + logger.debug('Recorded batch move', { blockCount: moves.length }) }, 
[activeWorkflowId, userId, undoRedoStore] ) @@ -245,7 +263,7 @@ export function useUndoRedo() { const operation: UpdateParentOperation = { id: crypto.randomUUID(), - type: 'update-parent', + type: UNDO_REDO_OPERATIONS.UPDATE_PARENT, timestamp: Date.now(), workflowId: activeWorkflowId, userId, @@ -261,7 +279,7 @@ export function useUndoRedo() { const inverse: UpdateParentOperation = { id: crypto.randomUUID(), - type: 'update-parent', + type: UNDO_REDO_OPERATIONS.UPDATE_PARENT, timestamp: Date.now(), workflowId: activeWorkflowId, userId, @@ -271,7 +289,7 @@ export function useUndoRedo() { newParentId: oldParentId, oldPosition: newPosition, newPosition: oldPosition, - affectedEdges, // Same edges need to be restored + affectedEdges, }, } @@ -288,6 +306,117 @@ export function useUndoRedo() { [activeWorkflowId, userId, undoRedoStore] ) + const recordBatchUpdateParent = useCallback( + ( + updates: Array<{ + blockId: string + oldParentId?: string + newParentId?: string + oldPosition: { x: number; y: number } + newPosition: { x: number; y: number } + affectedEdges?: Edge[] + }> + ) => { + if (!activeWorkflowId || updates.length === 0) return + + const operation: BatchUpdateParentOperation = { + id: crypto.randomUUID(), + type: UNDO_REDO_OPERATIONS.BATCH_UPDATE_PARENT, + timestamp: Date.now(), + workflowId: activeWorkflowId, + userId, + data: { updates }, + } + + const inverse: BatchUpdateParentOperation = { + id: crypto.randomUUID(), + type: UNDO_REDO_OPERATIONS.BATCH_UPDATE_PARENT, + timestamp: Date.now(), + workflowId: activeWorkflowId, + userId, + data: { + updates: updates.map((u) => ({ + blockId: u.blockId, + oldParentId: u.newParentId, + newParentId: u.oldParentId, + oldPosition: u.newPosition, + newPosition: u.oldPosition, + affectedEdges: u.affectedEdges, + })), + }, + } + + const entry = createOperationEntry(operation, inverse) + undoRedoStore.push(activeWorkflowId, userId, entry) + + logger.debug('Recorded batch update parent', { + updateCount: updates.length, + 
workflowId: activeWorkflowId, + }) + }, + [activeWorkflowId, userId, undoRedoStore] + ) + + const recordBatchToggleEnabled = useCallback( + (blockIds: string[], previousStates: Record) => { + if (!activeWorkflowId || blockIds.length === 0) return + + const operation: BatchToggleEnabledOperation = { + id: crypto.randomUUID(), + type: UNDO_REDO_OPERATIONS.BATCH_TOGGLE_ENABLED, + timestamp: Date.now(), + workflowId: activeWorkflowId, + userId, + data: { blockIds, previousStates }, + } + + const inverse: BatchToggleEnabledOperation = { + id: crypto.randomUUID(), + type: UNDO_REDO_OPERATIONS.BATCH_TOGGLE_ENABLED, + timestamp: Date.now(), + workflowId: activeWorkflowId, + userId, + data: { blockIds, previousStates }, + } + + const entry = createOperationEntry(operation, inverse) + undoRedoStore.push(activeWorkflowId, userId, entry) + + logger.debug('Recorded batch toggle enabled', { blockIds, previousStates }) + }, + [activeWorkflowId, userId, undoRedoStore] + ) + + const recordBatchToggleHandles = useCallback( + (blockIds: string[], previousStates: Record) => { + if (!activeWorkflowId || blockIds.length === 0) return + + const operation: BatchToggleHandlesOperation = { + id: crypto.randomUUID(), + type: UNDO_REDO_OPERATIONS.BATCH_TOGGLE_HANDLES, + timestamp: Date.now(), + workflowId: activeWorkflowId, + userId, + data: { blockIds, previousStates }, + } + + const inverse: BatchToggleHandlesOperation = { + id: crypto.randomUUID(), + type: UNDO_REDO_OPERATIONS.BATCH_TOGGLE_HANDLES, + timestamp: Date.now(), + workflowId: activeWorkflowId, + userId, + data: { blockIds, previousStates }, + } + + const entry = createOperationEntry(operation, inverse) + undoRedoStore.push(activeWorkflowId, userId, entry) + + logger.debug('Recorded batch toggle handles', { blockIds, previousStates }) + }, + [activeWorkflowId, userId, undoRedoStore] + ) + const undo = useCallback(async () => { if (!activeWorkflowId) return @@ -307,7 +436,7 @@ export function useUndoRedo() { const opId = 
crypto.randomUUID() switch (entry.inverse.type) { - case 'batch-remove-blocks': { + case UNDO_REDO_OPERATIONS.BATCH_REMOVE_BLOCKS: { const batchRemoveOp = entry.inverse as BatchRemoveBlocksOperation const { blockSnapshots } = batchRemoveOp.data const blockIds = blockSnapshots.map((b) => b.id) @@ -344,8 +473,8 @@ export function useUndoRedo() { addToQueue({ id: opId, operation: { - operation: 'batch-remove-blocks', - target: 'blocks', + operation: BLOCKS_OPERATIONS.BATCH_REMOVE_BLOCKS, + target: OPERATION_TARGETS.BLOCKS, payload: { ids: existingBlockIds }, }, workflowId: activeWorkflowId, @@ -355,8 +484,9 @@ export function useUndoRedo() { existingBlockIds.forEach((id) => workflowStore.removeBlock(id)) break } - case 'batch-add-blocks': { - const batchAddOp = entry.operation as BatchAddBlocksOperation + case UNDO_REDO_OPERATIONS.BATCH_ADD_BLOCKS: { + // Undoing a removal: inverse is batch-add-blocks, use entry.inverse for data + const batchAddOp = entry.inverse as BatchAddBlocksOperation const { blockSnapshots, edgeSnapshots, subBlockValues } = batchAddOp.data const blocksToAdd = blockSnapshots.filter((b) => !workflowStore.blocks[b.id]) @@ -368,8 +498,8 @@ export function useUndoRedo() { addToQueue({ id: opId, operation: { - operation: 'batch-add-blocks', - target: 'blocks', + operation: BLOCKS_OPERATIONS.BATCH_ADD_BLOCKS, + target: OPERATION_TARGETS.BLOCKS, payload: { blocks: blocksToAdd, edges: edgeSnapshots || [], @@ -422,127 +552,117 @@ export function useUndoRedo() { } break } - case 'remove-edge': { - const removeEdgeInverse = entry.inverse as RemoveEdgeOperation - const { edgeId } = removeEdgeInverse.data - if (workflowStore.edges.find((e) => e.id === edgeId)) { + case UNDO_REDO_OPERATIONS.BATCH_REMOVE_EDGES: { + // Undo batch-add-edges: inverse is batch-remove-edges, so remove the edges + const batchRemoveInverse = entry.inverse as BatchRemoveEdgesOperation + const { edgeSnapshots } = batchRemoveInverse.data + + const edgesToRemove = edgeSnapshots + 
.filter((e) => workflowStore.edges.find((edge) => edge.id === e.id)) + .map((e) => e.id) + + if (edgesToRemove.length > 0) { addToQueue({ id: opId, operation: { - operation: 'remove', - target: 'edge', - payload: { - id: edgeId, - isUndo: true, - originalOpId: entry.id, - }, + operation: EDGES_OPERATIONS.BATCH_REMOVE_EDGES, + target: OPERATION_TARGETS.EDGES, + payload: { ids: edgesToRemove }, }, workflowId: activeWorkflowId, userId, }) - workflowStore.removeEdge(edgeId) - } else { - logger.debug('Undo remove-edge skipped; edge missing', { - edgeId, - }) + edgesToRemove.forEach((id) => workflowStore.removeEdge(id)) } + logger.debug('Undid batch-add-edges', { edgeCount: edgesToRemove.length }) break } - case 'add-edge': { - const originalOp = entry.operation as RemoveEdgeOperation - const { edgeSnapshot } = originalOp.data - // Skip if snapshot missing or already exists - if (!edgeSnapshot || workflowStore.edges.find((e) => e.id === edgeSnapshot.id)) { - logger.debug('Undo add-edge skipped', { - hasSnapshot: Boolean(edgeSnapshot), + case UNDO_REDO_OPERATIONS.BATCH_ADD_EDGES: { + // Undo batch-remove-edges: inverse is batch-add-edges, so add edges back + const batchAddInverse = entry.inverse as BatchAddEdgesOperation + const { edgeSnapshots } = batchAddInverse.data + + const edgesToAdd = edgeSnapshots.filter( + (e) => !workflowStore.edges.find((edge) => edge.id === e.id) + ) + + if (edgesToAdd.length > 0) { + addToQueue({ + id: opId, + operation: { + operation: EDGES_OPERATIONS.BATCH_ADD_EDGES, + target: OPERATION_TARGETS.EDGES, + payload: { edges: edgesToAdd }, + }, + workflowId: activeWorkflowId, + userId, }) - break + edgesToAdd.forEach((edge) => workflowStore.addEdge(edge)) } - addToQueue({ - id: opId, - operation: { - operation: 'add', - target: 'edge', - payload: { ...edgeSnapshot, isUndo: true, originalOpId: entry.id }, - }, - workflowId: activeWorkflowId, - userId, - }) - workflowStore.addEdge(edgeSnapshot) + logger.debug('Undid batch-remove-edges', { 
edgeCount: edgesToAdd.length }) break } - case 'move-block': { - const moveOp = entry.inverse as MoveBlockOperation + case UNDO_REDO_OPERATIONS.BATCH_MOVE_BLOCKS: { + const batchMoveOp = entry.inverse as BatchMoveBlocksOperation const currentBlocks = useWorkflowStore.getState().blocks - if (currentBlocks[moveOp.data.blockId]) { - // Apply the inverse's target as the undo result (inverse.after) + const positionUpdates: Array<{ id: string; position: { x: number; y: number } }> = [] + + for (const move of batchMoveOp.data.moves) { + if (currentBlocks[move.blockId]) { + positionUpdates.push({ + id: move.blockId, + position: { x: move.after.x, y: move.after.y }, + }) + workflowStore.updateBlockPosition(move.blockId, { + x: move.after.x, + y: move.after.y, + }) + } + } + + if (positionUpdates.length > 0) { addToQueue({ id: opId, operation: { - operation: 'update-position', - target: 'block', - payload: { - id: moveOp.data.blockId, - position: { x: moveOp.data.after.x, y: moveOp.data.after.y }, - parentId: moveOp.data.after.parentId, - commit: true, - isUndo: true, - originalOpId: entry.id, - }, + operation: BLOCKS_OPERATIONS.BATCH_UPDATE_POSITIONS, + target: OPERATION_TARGETS.BLOCKS, + payload: { updates: positionUpdates }, }, workflowId: activeWorkflowId, userId, }) - // Use the store from the hook context for React re-renders - workflowStore.updateBlockPosition(moveOp.data.blockId, { - x: moveOp.data.after.x, - y: moveOp.data.after.y, - }) - if (moveOp.data.after.parentId !== moveOp.data.before.parentId) { - workflowStore.updateParentId( - moveOp.data.blockId, - moveOp.data.after.parentId || '', - 'parent' - ) - } - } else { - logger.debug('Undo move-block skipped; block missing', { - blockId: moveOp.data.blockId, - }) } break } - case 'update-parent': { - // Undo parent update means reverting to the old parent and position + case UNDO_REDO_OPERATIONS.UPDATE_PARENT: { const updateOp = entry.inverse as UpdateParentOperation const { blockId, newParentId, newPosition, 
affectedEdges } = updateOp.data if (workflowStore.blocks[blockId]) { - // If we're moving back INTO a subflow, restore edges first if (newParentId && affectedEdges && affectedEdges.length > 0) { - affectedEdges.forEach((edge) => { - if (!workflowStore.edges.find((e) => e.id === edge.id)) { - workflowStore.addEdge(edge) - addToQueue({ - id: crypto.randomUUID(), - operation: { - operation: 'add', - target: 'edge', - payload: { ...edge, isUndo: true }, - }, - workflowId: activeWorkflowId, - userId, - }) - } - }) + const edgesToAdd = affectedEdges.filter( + (e) => !workflowStore.edges.find((edge) => edge.id === e.id) + ) + if (edgesToAdd.length > 0) { + addToQueue({ + id: crypto.randomUUID(), + operation: { + operation: EDGES_OPERATIONS.BATCH_ADD_EDGES, + target: OPERATION_TARGETS.EDGES, + payload: { edges: edgesToAdd }, + }, + workflowId: activeWorkflowId, + userId, + }) + edgesToAdd.forEach((edge) => workflowStore.addEdge(edge)) + } } - // Send position update to server addToQueue({ id: crypto.randomUUID(), operation: { - operation: 'update-position', - target: 'block', + operation: BLOCK_OPERATIONS.UPDATE_POSITION, + target: OPERATION_TARGETS.BLOCK, payload: { id: blockId, position: newPosition, @@ -562,8 +682,8 @@ export function useUndoRedo() { addToQueue({ id: opId, operation: { - operation: 'update-parent', - target: 'block', + operation: BLOCK_OPERATIONS.UPDATE_PARENT, + target: OPERATION_TARGETS.BLOCK, payload: { id: blockId, parentId: newParentId || '', @@ -587,8 +707,8 @@ export function useUndoRedo() { addToQueue({ id: crypto.randomUUID(), operation: { - operation: 'remove', - target: 'edge', + operation: EDGE_OPERATIONS.REMOVE, + target: OPERATION_TARGETS.EDGE, payload: { id: edge.id, isUndo: true }, }, workflowId: activeWorkflowId, @@ -602,8 +722,142 @@ export function useUndoRedo() { } break } - case 'apply-diff': { - // Undo apply-diff means clearing the diff and restoring baseline + case UNDO_REDO_OPERATIONS.BATCH_UPDATE_PARENT: { + const batchUpdateOp 
= entry.inverse as BatchUpdateParentOperation + const { updates } = batchUpdateOp.data + + const validUpdates = updates.filter((u) => workflowStore.blocks[u.blockId]) + if (validUpdates.length === 0) { + logger.debug('Undo batch-update-parent skipped; no blocks exist') + break + } + + // Process each update + for (const update of validUpdates) { + const { blockId, newParentId, newPosition, affectedEdges } = update + + // Moving OUT of subflow (undoing insert) → restore edges first + if (!newParentId && affectedEdges && affectedEdges.length > 0) { + const edgesToAdd = affectedEdges.filter( + (e) => !workflowStore.edges.find((edge) => edge.id === e.id) + ) + if (edgesToAdd.length > 0) { + addToQueue({ + id: crypto.randomUUID(), + operation: { + operation: EDGES_OPERATIONS.BATCH_ADD_EDGES, + target: OPERATION_TARGETS.EDGES, + payload: { edges: edgesToAdd }, + }, + workflowId: activeWorkflowId, + userId, + }) + edgesToAdd.forEach((edge) => workflowStore.addEdge(edge)) + } + } + + // Moving INTO subflow (undoing removal) → remove edges first + if (newParentId && affectedEdges && affectedEdges.length > 0) { + affectedEdges.forEach((edge) => { + if (workflowStore.edges.find((e) => e.id === edge.id)) { + workflowStore.removeEdge(edge.id) + } + }) + addToQueue({ + id: crypto.randomUUID(), + operation: { + operation: EDGES_OPERATIONS.BATCH_REMOVE_EDGES, + target: OPERATION_TARGETS.EDGES, + payload: { edgeIds: affectedEdges.map((e) => e.id) }, + }, + workflowId: activeWorkflowId, + userId, + }) + } + + // Update position and parent locally + workflowStore.updateBlockPosition(blockId, newPosition) + workflowStore.updateParentId(blockId, newParentId || '', 'parent') + } + + // Send batch update to server + addToQueue({ + id: opId, + operation: { + operation: BLOCKS_OPERATIONS.BATCH_UPDATE_PARENT, + target: OPERATION_TARGETS.BLOCKS, + payload: { + updates: validUpdates.map((u) => ({ + id: u.blockId, + parentId: u.newParentId || '', + position: u.newPosition, + })), + }, + }, + 
workflowId: activeWorkflowId, + userId, + }) + + logger.debug('Undid batch-update-parent', { updateCount: validUpdates.length }) + break + } + case UNDO_REDO_OPERATIONS.BATCH_TOGGLE_ENABLED: { + const toggleOp = entry.inverse as BatchToggleEnabledOperation + const { blockIds, previousStates } = toggleOp.data + + const validBlockIds = blockIds.filter((id) => workflowStore.blocks[id]) + if (validBlockIds.length === 0) { + logger.debug('Undo batch-toggle-enabled skipped; no blocks exist') + break + } + + addToQueue({ + id: opId, + operation: { + operation: BLOCKS_OPERATIONS.BATCH_TOGGLE_ENABLED, + target: OPERATION_TARGETS.BLOCKS, + payload: { blockIds: validBlockIds, previousStates }, + }, + workflowId: activeWorkflowId, + userId, + }) + + // Use setBlockEnabled to directly restore to previous state + // This is more robust than conditional toggle in collaborative scenarios + validBlockIds.forEach((blockId) => { + workflowStore.setBlockEnabled(blockId, previousStates[blockId]) + }) + break + } + case UNDO_REDO_OPERATIONS.BATCH_TOGGLE_HANDLES: { + const toggleOp = entry.inverse as BatchToggleHandlesOperation + const { blockIds, previousStates } = toggleOp.data + + const validBlockIds = blockIds.filter((id) => workflowStore.blocks[id]) + if (validBlockIds.length === 0) { + logger.debug('Undo batch-toggle-handles skipped; no blocks exist') + break + } + + addToQueue({ + id: opId, + operation: { + operation: BLOCKS_OPERATIONS.BATCH_TOGGLE_HANDLES, + target: OPERATION_TARGETS.BLOCKS, + payload: { blockIds: validBlockIds, previousStates }, + }, + workflowId: activeWorkflowId, + userId, + }) + + // Use setBlockHandles to directly restore to previous state + // This is more robust than conditional toggle in collaborative scenarios + validBlockIds.forEach((blockId) => { + workflowStore.setBlockHandles(blockId, previousStates[blockId]) + }) + break + } + case UNDO_REDO_OPERATIONS.APPLY_DIFF: { const applyDiffInverse = entry.inverse as any const { baselineSnapshot } = 
applyDiffInverse.data @@ -662,12 +916,11 @@ export function useUndoRedo() { logger.info('Undid apply-diff operation successfully') break } - case 'accept-diff': { + case UNDO_REDO_OPERATIONS.ACCEPT_DIFF: { // Undo accept-diff means restoring diff view with markers const acceptDiffInverse = entry.inverse as any const acceptDiffOp = entry.operation as any const { beforeAccept, diffAnalysis } = acceptDiffInverse.data - const { baselineSnapshot } = acceptDiffOp.data const { useWorkflowDiffStore } = await import('@/stores/workflow-diff/store') const diffStore = useWorkflowDiffStore.getState() @@ -722,10 +975,9 @@ export function useUndoRedo() { logger.info('Undid accept-diff operation - restored diff view') break } - case 'reject-diff': { + case UNDO_REDO_OPERATIONS.REJECT_DIFF: { // Undo reject-diff means restoring diff view with markers const rejectDiffInverse = entry.inverse as any - const rejectDiffOp = entry.operation as any const { beforeReject, diffAnalysis, baselineSnapshot } = rejectDiffInverse.data const { useWorkflowDiffStore } = await import('@/stores/workflow-diff/store') const { useWorkflowStore } = await import('@/stores/workflows/workflow/store') @@ -793,7 +1045,7 @@ export function useUndoRedo() { const opId = crypto.randomUUID() switch (entry.operation.type) { - case 'batch-add-blocks': { + case UNDO_REDO_OPERATIONS.BATCH_ADD_BLOCKS: { const batchOp = entry.operation as BatchAddBlocksOperation const { blockSnapshots, edgeSnapshots, subBlockValues } = batchOp.data @@ -806,8 +1058,8 @@ export function useUndoRedo() { addToQueue({ id: opId, operation: { - operation: 'batch-add-blocks', - target: 'blocks', + operation: BLOCKS_OPERATIONS.BATCH_ADD_BLOCKS, + target: OPERATION_TARGETS.BLOCKS, payload: { blocks: blocksToAdd, edges: edgeSnapshots || [], @@ -860,7 +1112,7 @@ export function useUndoRedo() { } break } - case 'batch-remove-blocks': { + case UNDO_REDO_OPERATIONS.BATCH_REMOVE_BLOCKS: { const batchOp = entry.operation as BatchRemoveBlocksOperation 
const { blockSnapshots } = batchOp.data const blockIds = blockSnapshots.map((b) => b.id) @@ -874,8 +1126,8 @@ export function useUndoRedo() { addToQueue({ id: opId, operation: { - operation: 'batch-remove-blocks', - target: 'blocks', + operation: BLOCKS_OPERATIONS.BATCH_REMOVE_BLOCKS, + target: OPERATION_TARGETS.BLOCKS, payload: { ids: existingBlockIds }, }, workflowId: activeWorkflowId, @@ -885,89 +1137,91 @@ export function useUndoRedo() { existingBlockIds.forEach((id) => workflowStore.removeBlock(id)) break } - case 'add-edge': { - // Use snapshot from inverse - const inv = entry.inverse as RemoveEdgeOperation - const snap = inv.data.edgeSnapshot - if (!snap || workflowStore.edges.find((e) => e.id === snap.id)) { - logger.debug('Redo add-edge skipped', { hasSnapshot: Boolean(snap) }) - break + case UNDO_REDO_OPERATIONS.BATCH_REMOVE_EDGES: { + // Redo batch-remove-edges: remove all edges again + const batchRemoveOp = entry.operation as BatchRemoveEdgesOperation + const { edgeSnapshots } = batchRemoveOp.data + + const edgesToRemove = edgeSnapshots + .filter((e) => workflowStore.edges.find((edge) => edge.id === e.id)) + .map((e) => e.id) + + if (edgesToRemove.length > 0) { + addToQueue({ + id: opId, + operation: { + operation: EDGES_OPERATIONS.BATCH_REMOVE_EDGES, + target: OPERATION_TARGETS.EDGES, + payload: { ids: edgesToRemove }, + }, + workflowId: activeWorkflowId, + userId, + }) + edgesToRemove.forEach((id) => workflowStore.removeEdge(id)) } - addToQueue({ - id: opId, - operation: { - operation: 'add', - target: 'edge', - payload: { ...snap, isRedo: true, originalOpId: entry.id }, - }, - workflowId: activeWorkflowId, - userId, - }) - workflowStore.addEdge(snap) + + logger.debug('Redid batch-remove-edges', { edgeCount: edgesToRemove.length }) break } - case 'remove-edge': { - const { edgeId } = entry.operation.data - if (workflowStore.edges.find((e) => e.id === edgeId)) { + case UNDO_REDO_OPERATIONS.BATCH_ADD_EDGES: { + // Redo batch-add-edges: add all edges 
again + const batchAddOp = entry.operation as BatchAddEdgesOperation + const { edgeSnapshots } = batchAddOp.data + + const edgesToAdd = edgeSnapshots.filter( + (e) => !workflowStore.edges.find((edge) => edge.id === e.id) + ) + + if (edgesToAdd.length > 0) { addToQueue({ id: opId, operation: { - operation: 'remove', - target: 'edge', - payload: { id: edgeId, isRedo: true, originalOpId: entry.id }, + operation: EDGES_OPERATIONS.BATCH_ADD_EDGES, + target: OPERATION_TARGETS.EDGES, + payload: { edges: edgesToAdd }, }, workflowId: activeWorkflowId, userId, }) - workflowStore.removeEdge(edgeId) - } else { - logger.debug('Redo remove-edge skipped; edge missing', { - edgeId, - }) + edgesToAdd.forEach((edge) => workflowStore.addEdge(edge)) } + + logger.debug('Redid batch-add-edges', { edgeCount: edgesToAdd.length }) break } - case 'move-block': { - const moveOp = entry.operation as MoveBlockOperation + case UNDO_REDO_OPERATIONS.BATCH_MOVE_BLOCKS: { + const batchMoveOp = entry.operation as BatchMoveBlocksOperation const currentBlocks = useWorkflowStore.getState().blocks - if (currentBlocks[moveOp.data.blockId]) { + const positionUpdates: Array<{ id: string; position: { x: number; y: number } }> = [] + + for (const move of batchMoveOp.data.moves) { + if (currentBlocks[move.blockId]) { + positionUpdates.push({ + id: move.blockId, + position: { x: move.after.x, y: move.after.y }, + }) + workflowStore.updateBlockPosition(move.blockId, { + x: move.after.x, + y: move.after.y, + }) + } + } + + if (positionUpdates.length > 0) { addToQueue({ id: opId, operation: { - operation: 'update-position', - target: 'block', - payload: { - id: moveOp.data.blockId, - position: { x: moveOp.data.after.x, y: moveOp.data.after.y }, - parentId: moveOp.data.after.parentId, - commit: true, - isRedo: true, - originalOpId: entry.id, - }, + operation: BLOCKS_OPERATIONS.BATCH_UPDATE_POSITIONS, + target: OPERATION_TARGETS.BLOCKS, + payload: { updates: positionUpdates }, }, workflowId: activeWorkflowId, 
userId, }) - // Use the store from the hook context for React re-renders - workflowStore.updateBlockPosition(moveOp.data.blockId, { - x: moveOp.data.after.x, - y: moveOp.data.after.y, - }) - if (moveOp.data.after.parentId !== moveOp.data.before.parentId) { - workflowStore.updateParentId( - moveOp.data.blockId, - moveOp.data.after.parentId || '', - 'parent' - ) - } - } else { - logger.debug('Redo move-block skipped; block missing', { - blockId: moveOp.data.blockId, - }) } break } - case 'update-parent': { + case UNDO_REDO_OPERATIONS.UPDATE_PARENT: { // Redo parent update means applying the new parent and position const updateOp = entry.operation as UpdateParentOperation const { blockId, newParentId, newPosition, affectedEdges } = updateOp.data @@ -981,8 +1235,8 @@ export function useUndoRedo() { addToQueue({ id: crypto.randomUUID(), operation: { - operation: 'remove', - target: 'edge', + operation: EDGE_OPERATIONS.REMOVE, + target: OPERATION_TARGETS.EDGE, payload: { id: edge.id, isRedo: true }, }, workflowId: activeWorkflowId, @@ -996,8 +1250,8 @@ export function useUndoRedo() { addToQueue({ id: crypto.randomUUID(), operation: { - operation: 'update-position', - target: 'block', + operation: BLOCK_OPERATIONS.UPDATE_POSITION, + target: OPERATION_TARGETS.BLOCK, payload: { id: blockId, position: newPosition, @@ -1017,8 +1271,8 @@ export function useUndoRedo() { addToQueue({ id: opId, operation: { - operation: 'update-parent', - target: 'block', + operation: BLOCK_OPERATIONS.UPDATE_PARENT, + target: OPERATION_TARGETS.BLOCK, payload: { id: blockId, parentId: newParentId || '', @@ -1035,29 +1289,165 @@ export function useUndoRedo() { workflowStore.updateParentId(blockId, newParentId || '', 'parent') // If we're adding TO a subflow, restore edges after + if (newParentId && affectedEdges && affectedEdges.length > 0) { + const edgesToAdd = affectedEdges.filter( + (e) => !workflowStore.edges.find((edge) => edge.id === e.id) + ) + if (edgesToAdd.length > 0) { + addToQueue({ + 
id: crypto.randomUUID(), + operation: { + operation: EDGES_OPERATIONS.BATCH_ADD_EDGES, + target: OPERATION_TARGETS.EDGES, + payload: { edges: edgesToAdd }, + }, + workflowId: activeWorkflowId, + userId, + }) + edgesToAdd.forEach((edge) => workflowStore.addEdge(edge)) + } + } + } else { + logger.debug('Redo update-parent skipped; block missing', { blockId }) + } + break + } + case UNDO_REDO_OPERATIONS.BATCH_UPDATE_PARENT: { + const batchUpdateOp = entry.operation as BatchUpdateParentOperation + const { updates } = batchUpdateOp.data + + const validUpdates = updates.filter((u) => workflowStore.blocks[u.blockId]) + if (validUpdates.length === 0) { + logger.debug('Redo batch-update-parent skipped; no blocks exist') + break + } + + // Process each update + for (const update of validUpdates) { + const { blockId, newParentId, newPosition, affectedEdges } = update + + // Moving INTO subflow (redoing insert) → remove edges first if (newParentId && affectedEdges && affectedEdges.length > 0) { affectedEdges.forEach((edge) => { - if (!workflowStore.edges.find((e) => e.id === edge.id)) { - workflowStore.addEdge(edge) - addToQueue({ - id: crypto.randomUUID(), - operation: { - operation: 'add', - target: 'edge', - payload: { ...edge, isRedo: true }, - }, - workflowId: activeWorkflowId, - userId, - }) + if (workflowStore.edges.find((e) => e.id === edge.id)) { + workflowStore.removeEdge(edge.id) } }) + addToQueue({ + id: crypto.randomUUID(), + operation: { + operation: EDGES_OPERATIONS.BATCH_REMOVE_EDGES, + target: OPERATION_TARGETS.EDGES, + payload: { edgeIds: affectedEdges.map((e) => e.id) }, + }, + workflowId: activeWorkflowId, + userId, + }) + } + + // Update position and parent locally + workflowStore.updateBlockPosition(blockId, newPosition) + workflowStore.updateParentId(blockId, newParentId || '', 'parent') + + // Moving OUT of subflow (redoing removal) → restore edges after + if (!newParentId && affectedEdges && affectedEdges.length > 0) { + const edgesToAdd = 
affectedEdges.filter( + (e) => !workflowStore.edges.find((edge) => edge.id === e.id) + ) + if (edgesToAdd.length > 0) { + addToQueue({ + id: crypto.randomUUID(), + operation: { + operation: EDGES_OPERATIONS.BATCH_ADD_EDGES, + target: OPERATION_TARGETS.EDGES, + payload: { edges: edgesToAdd }, + }, + workflowId: activeWorkflowId, + userId, + }) + edgesToAdd.forEach((edge) => workflowStore.addEdge(edge)) + } } - } else { - logger.debug('Redo update-parent skipped; block missing', { blockId }) } + + // Send batch update to server + addToQueue({ + id: opId, + operation: { + operation: BLOCKS_OPERATIONS.BATCH_UPDATE_PARENT, + target: OPERATION_TARGETS.BLOCKS, + payload: { + updates: validUpdates.map((u) => ({ + id: u.blockId, + parentId: u.newParentId || '', + position: u.newPosition, + })), + }, + }, + workflowId: activeWorkflowId, + userId, + }) + + logger.debug('Redid batch-update-parent', { updateCount: validUpdates.length }) + break + } + case UNDO_REDO_OPERATIONS.BATCH_TOGGLE_ENABLED: { + const toggleOp = entry.operation as BatchToggleEnabledOperation + const { blockIds, previousStates } = toggleOp.data + + const validBlockIds = blockIds.filter((id) => workflowStore.blocks[id]) + if (validBlockIds.length === 0) { + logger.debug('Redo batch-toggle-enabled skipped; no blocks exist') + break + } + + addToQueue({ + id: opId, + operation: { + operation: BLOCKS_OPERATIONS.BATCH_TOGGLE_ENABLED, + target: OPERATION_TARGETS.BLOCKS, + payload: { blockIds: validBlockIds, previousStates }, + }, + workflowId: activeWorkflowId, + userId, + }) + + // Use setBlockEnabled to directly set to toggled state + // Redo sets to !previousStates (the state after the original toggle) + validBlockIds.forEach((blockId) => { + workflowStore.setBlockEnabled(blockId, !previousStates[blockId]) + }) + break + } + case UNDO_REDO_OPERATIONS.BATCH_TOGGLE_HANDLES: { + const toggleOp = entry.operation as BatchToggleHandlesOperation + const { blockIds, previousStates } = toggleOp.data + + const 
validBlockIds = blockIds.filter((id) => workflowStore.blocks[id]) + if (validBlockIds.length === 0) { + logger.debug('Redo batch-toggle-handles skipped; no blocks exist') + break + } + + addToQueue({ + id: opId, + operation: { + operation: BLOCKS_OPERATIONS.BATCH_TOGGLE_HANDLES, + target: OPERATION_TARGETS.BLOCKS, + payload: { blockIds: validBlockIds, previousStates }, + }, + workflowId: activeWorkflowId, + userId, + }) + + // Use setBlockHandles to directly set to toggled state + // Redo sets to !previousStates (the state after the original toggle) + validBlockIds.forEach((blockId) => { + workflowStore.setBlockHandles(blockId, !previousStates[blockId]) + }) break } - case 'apply-diff': { + case UNDO_REDO_OPERATIONS.APPLY_DIFF: { // Redo apply-diff means re-applying the proposed state with diff markers const applyDiffOp = entry.operation as any const { proposedState, diffAnalysis, baselineSnapshot } = applyDiffOp.data @@ -1114,7 +1504,7 @@ export function useUndoRedo() { logger.info('Redid apply-diff operation') break } - case 'accept-diff': { + case UNDO_REDO_OPERATIONS.ACCEPT_DIFF: { // Redo accept-diff means re-accepting (stripping markers) const acceptDiffOp = entry.operation as any const { afterAccept } = acceptDiffOp.data @@ -1168,7 +1558,7 @@ export function useUndoRedo() { logger.info('Redid accept-diff operation - cleared diff view') break } - case 'reject-diff': { + case UNDO_REDO_OPERATIONS.REJECT_DIFF: { // Redo reject-diff means re-rejecting (restoring baseline, clearing diff) const rejectDiffOp = entry.operation as any const { afterReject } = rejectDiffOp.data @@ -1246,7 +1636,7 @@ export function useUndoRedo() { const operation: any = { id: crypto.randomUUID(), - type: 'apply-diff', + type: UNDO_REDO_OPERATIONS.APPLY_DIFF, timestamp: Date.now(), workflowId: activeWorkflowId, userId, @@ -1259,7 +1649,7 @@ export function useUndoRedo() { const inverse: any = { id: crypto.randomUUID(), - type: 'apply-diff', + type: UNDO_REDO_OPERATIONS.APPLY_DIFF, 
timestamp: Date.now(), workflowId: activeWorkflowId, userId, @@ -1290,7 +1680,7 @@ export function useUndoRedo() { const operation: any = { id: crypto.randomUUID(), - type: 'accept-diff', + type: UNDO_REDO_OPERATIONS.ACCEPT_DIFF, timestamp: Date.now(), workflowId: activeWorkflowId, userId, @@ -1304,7 +1694,7 @@ export function useUndoRedo() { const inverse: any = { id: crypto.randomUUID(), - type: 'accept-diff', + type: UNDO_REDO_OPERATIONS.ACCEPT_DIFF, timestamp: Date.now(), workflowId: activeWorkflowId, userId, @@ -1330,7 +1720,7 @@ export function useUndoRedo() { const operation: any = { id: crypto.randomUUID(), - type: 'reject-diff', + type: UNDO_REDO_OPERATIONS.REJECT_DIFF, timestamp: Date.now(), workflowId: activeWorkflowId, userId, @@ -1344,7 +1734,7 @@ export function useUndoRedo() { const inverse: any = { id: crypto.randomUUID(), - type: 'reject-diff', + type: UNDO_REDO_OPERATIONS.REJECT_DIFF, timestamp: Date.now(), workflowId: activeWorkflowId, userId, @@ -1372,9 +1762,12 @@ export function useUndoRedo() { recordBatchAddBlocks, recordBatchRemoveBlocks, recordAddEdge, - recordRemoveEdge, - recordMove, + recordBatchRemoveEdges, + recordBatchMoveBlocks, recordUpdateParent, + recordBatchUpdateParent, + recordBatchToggleEnabled, + recordBatchToggleHandles, recordApplyDiff, recordAcceptDiff, recordRejectDiff, diff --git a/apps/sim/hooks/use-webhook-management.ts b/apps/sim/hooks/use-webhook-management.ts index e71a0cedb3..3df45eee07 100644 --- a/apps/sim/hooks/use-webhook-management.ts +++ b/apps/sim/hooks/use-webhook-management.ts @@ -302,7 +302,11 @@ export function useWebhookManagement({ effectiveTriggerId: string | undefined, selectedCredentialId: string | null ): Promise => { - const triggerConfig = useSubBlockStore.getState().getValue(blockId, 'triggerConfig') + const triggerConfigRaw = useSubBlockStore.getState().getValue(blockId, 'triggerConfig') + const triggerConfig = + typeof triggerConfigRaw === 'object' && triggerConfigRaw !== null + ? 
(triggerConfigRaw as Record) + : {} const isCredentialSet = selectedCredentialId?.startsWith(CREDENTIAL_SET_PREFIX) const credentialSetId = isCredentialSet diff --git a/apps/sim/lib/logs/execution/logging-factory.ts b/apps/sim/lib/logs/execution/logging-factory.ts index 5d5e5f8eb3..be7e2d5fc5 100644 --- a/apps/sim/lib/logs/execution/logging-factory.ts +++ b/apps/sim/lib/logs/execution/logging-factory.ts @@ -1,3 +1,5 @@ +import { db, workflow } from '@sim/db' +import { eq } from 'drizzle-orm' import { BASE_EXECUTION_CHARGE } from '@/lib/billing/constants' import type { ExecutionEnvironment, ExecutionTrigger, WorkflowState } from '@/lib/logs/types' import { @@ -34,7 +36,15 @@ export function createEnvironmentObject( } export async function loadWorkflowStateForExecution(workflowId: string): Promise { - const normalizedData = await loadWorkflowFromNormalizedTables(workflowId) + const [normalizedData, workflowRecord] = await Promise.all([ + loadWorkflowFromNormalizedTables(workflowId), + db + .select({ variables: workflow.variables }) + .from(workflow) + .where(eq(workflow.id, workflowId)) + .limit(1) + .then((rows) => rows[0]), + ]) if (!normalizedData) { throw new Error( @@ -47,6 +57,7 @@ export async function loadWorkflowStateForExecution(workflowId: string): Promise edges: normalizedData.edges || [], loops: normalizedData.loops || {}, parallels: normalizedData.parallels || {}, + variables: (workflowRecord?.variables as WorkflowState['variables']) || undefined, } } @@ -65,6 +76,7 @@ export async function loadDeployedWorkflowStateForLogging( edges: deployedData.edges || [], loops: deployedData.loops || {}, parallels: deployedData.parallels || {}, + variables: deployedData.variables as WorkflowState['variables'], } } diff --git a/apps/sim/lib/logs/execution/snapshot/service.test.ts b/apps/sim/lib/logs/execution/snapshot/service.test.ts index 091bdb4a1b..543a2b1a16 100644 --- a/apps/sim/lib/logs/execution/snapshot/service.test.ts +++ 
b/apps/sim/lib/logs/execution/snapshot/service.test.ts @@ -105,7 +105,7 @@ describe('SnapshotService', () => { block1: { ...baseState.blocks.block1, // Different block state - we can change outputs to make it different - outputs: { response: { content: 'different result' } as Record }, + outputs: { response: { type: 'string', description: 'different result' } }, }, }, } @@ -177,7 +177,7 @@ describe('SnapshotService', () => { }, }, outputs: { - response: { content: 'Agent response' } as Record, + response: { type: 'string', description: 'Agent response' }, }, enabled: true, horizontalHandles: true, @@ -211,5 +211,113 @@ describe('SnapshotService', () => { const hash2 = service.computeStateHash(complexState) expect(hash).toBe(hash2) }) + + test('should include variables in hash computation', () => { + const stateWithVariables: WorkflowState = { + blocks: {}, + edges: [], + loops: {}, + parallels: {}, + variables: { + 'var-1': { + id: 'var-1', + name: 'apiKey', + type: 'string', + value: 'secret123', + }, + }, + } + + const stateWithoutVariables: WorkflowState = { + blocks: {}, + edges: [], + loops: {}, + parallels: {}, + } + + const hashWith = service.computeStateHash(stateWithVariables) + const hashWithout = service.computeStateHash(stateWithoutVariables) + + expect(hashWith).not.toBe(hashWithout) + }) + + test('should detect changes in variable values', () => { + const state1: WorkflowState = { + blocks: {}, + edges: [], + loops: {}, + parallels: {}, + variables: { + 'var-1': { + id: 'var-1', + name: 'myVar', + type: 'string', + value: 'value1', + }, + }, + } + + const state2: WorkflowState = { + blocks: {}, + edges: [], + loops: {}, + parallels: {}, + variables: { + 'var-1': { + id: 'var-1', + name: 'myVar', + type: 'string', + value: 'value2', // Different value + }, + }, + } + + const hash1 = service.computeStateHash(state1) + const hash2 = service.computeStateHash(state2) + + expect(hash1).not.toBe(hash2) + }) + + test('should generate consistent hashes for 
states with variables', () => { + const stateWithVariables: WorkflowState = { + blocks: { + block1: { + id: 'block1', + name: 'Test', + type: 'agent', + position: { x: 0, y: 0 }, + subBlocks: {}, + outputs: {}, + enabled: true, + horizontalHandles: true, + advancedMode: false, + height: 0, + }, + }, + edges: [], + loops: {}, + parallels: {}, + variables: { + 'var-1': { + id: 'var-1', + name: 'testVar', + type: 'plain', + value: 'testValue', + }, + 'var-2': { + id: 'var-2', + name: 'anotherVar', + type: 'number', + value: 42, + }, + }, + } + + const hash1 = service.computeStateHash(stateWithVariables) + const hash2 = service.computeStateHash(stateWithVariables) + + expect(hash1).toBe(hash2) + expect(hash1).toHaveLength(64) + }) }) }) diff --git a/apps/sim/lib/logs/execution/snapshot/service.ts b/apps/sim/lib/logs/execution/snapshot/service.ts index b28e94e529..d753cbbd87 100644 --- a/apps/sim/lib/logs/execution/snapshot/service.ts +++ b/apps/sim/lib/logs/execution/snapshot/service.ts @@ -182,11 +182,15 @@ export class SnapshotService implements ISnapshotService { normalizedParallels[parallelId] = normalizeValue(parallel) } + // 4. Normalize variables (if present) + const normalizedVariables = state.variables ? 
normalizeValue(state.variables) : undefined + return { blocks: normalizedBlocks, edges: normalizedEdges, loops: normalizedLoops, parallels: normalizedParallels, + ...(normalizedVariables !== undefined && { variables: normalizedVariables }), } } } diff --git a/apps/sim/lib/mcp/workflow-mcp-sync.ts b/apps/sim/lib/mcp/workflow-mcp-sync.ts index c6055a713b..447eeefc6f 100644 --- a/apps/sim/lib/mcp/workflow-mcp-sync.ts +++ b/apps/sim/lib/mcp/workflow-mcp-sync.ts @@ -3,6 +3,7 @@ import { createLogger } from '@sim/logger' import { eq } from 'drizzle-orm' import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils' import { hasValidStartBlockInState } from '@/lib/workflows/triggers/trigger-utils' +import type { WorkflowState } from '@/stores/workflows/workflow/types' import { extractInputFormatFromBlocks, generateToolInputSchema } from './workflow-tool-schema' const logger = createLogger('WorkflowMcpSync') @@ -59,7 +60,7 @@ export async function syncMcpToolsForWorkflow(options: SyncOptions): Promise ({ }), })) +vi.mock('dns', () => ({ + resolveMx: ( + _domain: string, + callback: (err: Error | null, addresses: { exchange: string; priority: number }[]) => void + ) => { + callback(null, [{ exchange: 'mail.example.com', priority: 10 }]) + }, +})) + describe('Email Validation', () => { describe('validateEmail', () => { it.concurrent('should validate a correct email', async () => { diff --git a/apps/sim/lib/workflows/autolayout/types.ts b/apps/sim/lib/workflows/autolayout/types.ts index a20c35715a..7f8cf78190 100644 --- a/apps/sim/lib/workflows/autolayout/types.ts +++ b/apps/sim/lib/workflows/autolayout/types.ts @@ -1,5 +1,8 @@ import type { BlockState, Position } from '@/stores/workflows/workflow/types' +export type { Edge } from 'reactflow' +export type { Loop, Parallel } from '@/stores/workflows/workflow/types' + export interface LayoutOptions { horizontalSpacing?: number verticalSpacing?: number @@ -12,30 +15,6 @@ export interface LayoutResult { 
error?: string } -export interface Edge { - id: string - source: string - target: string - sourceHandle?: string | null - targetHandle?: string | null -} - -export interface Loop { - id: string - nodes: string[] - iterations: number - loopType: 'for' | 'forEach' | 'while' | 'doWhile' - forEachItems?: any[] | Record | string // Items or expression - whileCondition?: string // JS expression that evaluates to boolean -} - -export interface Parallel { - id: string - nodes: string[] - count?: number - parallelType?: 'count' | 'collection' -} - export interface BlockMetrics { width: number height: number diff --git a/apps/sim/lib/workflows/blocks/block-outputs.ts b/apps/sim/lib/workflows/blocks/block-outputs.ts index 6ea6eef84f..2fabf9692f 100644 --- a/apps/sim/lib/workflows/blocks/block-outputs.ts +++ b/apps/sim/lib/workflows/blocks/block-outputs.ts @@ -11,10 +11,23 @@ import { USER_FILE_PROPERTY_TYPES, } from '@/lib/workflows/types' import { getBlock } from '@/blocks' -import type { BlockConfig, OutputCondition } from '@/blocks/types' +import type { BlockConfig, OutputCondition, OutputFieldDefinition } from '@/blocks/types' import { getTrigger, isTriggerValid } from '@/triggers' -type OutputDefinition = Record +type OutputDefinition = Record + +interface SubBlockWithValue { + value?: unknown +} + +type ConditionValue = string | number | boolean + +/** + * Checks if a value is a valid primitive for condition comparison. + */ +function isConditionPrimitive(value: unknown): value is ConditionValue { + return typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean' +} /** * Evaluates an output condition against subBlock values. 
@@ -22,7 +35,7 @@ type OutputDefinition = Record */ function evaluateOutputCondition( condition: OutputCondition, - subBlocks: Record | undefined + subBlocks: Record | undefined ): boolean { if (!subBlocks) return false @@ -30,7 +43,8 @@ function evaluateOutputCondition( let matches: boolean if (Array.isArray(condition.value)) { - matches = condition.value.includes(fieldValue) + // For array conditions, check if fieldValue is a valid primitive and included + matches = isConditionPrimitive(fieldValue) && condition.value.includes(fieldValue) } else { matches = fieldValue === condition.value } @@ -44,7 +58,8 @@ function evaluateOutputCondition( let andMatches: boolean if (Array.isArray(condition.and.value)) { - andMatches = condition.and.value.includes(andFieldValue) + andMatches = + isConditionPrimitive(andFieldValue) && condition.and.value.includes(andFieldValue) } else { andMatches = andFieldValue === condition.and.value } @@ -65,7 +80,7 @@ function evaluateOutputCondition( */ function filterOutputsByCondition( outputs: OutputDefinition, - subBlocks: Record | undefined + subBlocks: Record | undefined ): OutputDefinition { const filtered: OutputDefinition = {} @@ -119,7 +134,7 @@ function hasInputFormat(blockConfig: BlockConfig): boolean { } function getTriggerId( - subBlocks: Record | undefined, + subBlocks: Record | undefined, blockConfig: BlockConfig ): string | undefined { const selectedTriggerIdValue = subBlocks?.selectedTriggerId?.value @@ -136,13 +151,17 @@ function getTriggerId( ) } -function getUnifiedStartOutputs(subBlocks: Record | undefined): OutputDefinition { +function getUnifiedStartOutputs( + subBlocks: Record | undefined +): OutputDefinition { const outputs = { ...UNIFIED_START_OUTPUTS } const normalizedInputFormat = normalizeInputFormatValue(subBlocks?.inputFormat?.value) return applyInputFormatFields(normalizedInputFormat, outputs) } -function getLegacyStarterOutputs(subBlocks: Record | undefined): OutputDefinition { +function 
getLegacyStarterOutputs( + subBlocks: Record | undefined +): OutputDefinition { const startWorkflowValue = subBlocks?.startWorkflow?.value if (startWorkflowValue === 'chat') { @@ -179,7 +198,7 @@ function shouldClearBaseOutputs( function applyInputFormatToOutputs( blockType: string, blockConfig: BlockConfig, - subBlocks: Record | undefined, + subBlocks: Record | undefined, baseOutputs: OutputDefinition ): OutputDefinition { if (!hasInputFormat(blockConfig) || !subBlocks?.inputFormat?.value) { @@ -203,7 +222,7 @@ function applyInputFormatToOutputs( export function getBlockOutputs( blockType: string, - subBlocks?: Record, + subBlocks?: Record, triggerMode?: boolean ): OutputDefinition { const blockConfig = getBlock(blockType) @@ -214,7 +233,8 @@ export function getBlockOutputs( if (triggerId && isTriggerValid(triggerId)) { const trigger = getTrigger(triggerId) if (trigger.outputs) { - return trigger.outputs + // TriggerOutput is compatible with OutputFieldDefinition at runtime + return trigger.outputs as OutputDefinition } } } @@ -226,7 +246,7 @@ export function getBlockOutputs( } if (blockType === 'human_in_the_loop') { - const hitlOutputs: Record = { + const hitlOutputs: OutputDefinition = { url: { type: 'string', description: 'Resume UI URL' }, resumeEndpoint: { type: 'string', @@ -251,7 +271,7 @@ export function getBlockOutputs( if (blockType === 'approval') { // Start with only url (apiUrl commented out - not accessible as output) - const pauseResumeOutputs: Record = { + const pauseResumeOutputs: OutputDefinition = { url: { type: 'string', description: 'Resume UI URL' }, // apiUrl: { type: 'string', description: 'Resume API URL' }, // Commented out - not accessible as output } @@ -285,7 +305,7 @@ function shouldFilterReservedField( blockType: string, key: string, prefix: string, - subBlocks: Record | undefined + subBlocks: Record | undefined ): boolean { if (blockType !== TRIGGER_TYPES.START || prefix) { return false @@ -308,7 +328,7 @@ function 
expandFileTypeProperties(path: string): string[] { function collectOutputPaths( obj: OutputDefinition, blockType: string, - subBlocks: Record | undefined, + subBlocks: Record | undefined, prefix = '' ): string[] { const paths: string[] = [] @@ -321,13 +341,14 @@ function collectOutputPaths( } if (value && typeof value === 'object' && 'type' in value) { - if (value.type === 'files') { + const typedValue = value as { type: unknown } + if (typedValue.type === 'files') { paths.push(...expandFileTypeProperties(path)) } else { paths.push(path) } } else if (value && typeof value === 'object' && !Array.isArray(value)) { - paths.push(...collectOutputPaths(value, blockType, subBlocks, path)) + paths.push(...collectOutputPaths(value as OutputDefinition, blockType, subBlocks, path)) } else { paths.push(path) } @@ -338,7 +359,7 @@ function collectOutputPaths( export function getBlockOutputPaths( blockType: string, - subBlocks?: Record, + subBlocks?: Record, triggerMode?: boolean ): string[] { const outputs = getBlockOutputs(blockType, subBlocks, triggerMode) @@ -351,39 +372,45 @@ function getFilePropertyType(outputs: OutputDefinition, pathParts: string[]): st return null } - let current: any = outputs + let current: unknown = outputs for (const part of pathParts.slice(0, -1)) { if (!current || typeof current !== 'object') { return null } - current = current[part] + current = (current as Record)[part] } - if (current && typeof current === 'object' && 'type' in current && current.type === 'files') { + if ( + current && + typeof current === 'object' && + 'type' in current && + (current as { type: unknown }).type === 'files' + ) { return USER_FILE_PROPERTY_TYPES[lastPart as keyof typeof USER_FILE_PROPERTY_TYPES] } return null } -function traverseOutputPath(outputs: OutputDefinition, pathParts: string[]): any { - let current: any = outputs +function traverseOutputPath(outputs: OutputDefinition, pathParts: string[]): unknown { + let current: unknown = outputs for (const part of 
pathParts) { if (!current || typeof current !== 'object') { return null } - current = current[part] + current = (current as Record)[part] } return current } -function extractType(value: any): string { +function extractType(value: unknown): string { if (!value) return 'any' if (typeof value === 'object' && 'type' in value) { - return value.type + const typeValue = (value as { type: unknown }).type + return typeof typeValue === 'string' ? typeValue : 'any' } return typeof value === 'string' ? value : 'any' @@ -392,7 +419,7 @@ function extractType(value: any): string { export function getBlockOutputType( blockType: string, outputPath: string, - subBlocks?: Record, + subBlocks?: Record, triggerMode?: boolean ): string { const outputs = getBlockOutputs(blockType, subBlocks, triggerMode) diff --git a/apps/sim/lib/workflows/comparison/compare.ts b/apps/sim/lib/workflows/comparison/compare.ts index a34521e23b..4f038cd8c2 100644 --- a/apps/sim/lib/workflows/comparison/compare.ts +++ b/apps/sim/lib/workflows/comparison/compare.ts @@ -51,8 +51,8 @@ export function hasWorkflowChanged( } // 3. Build normalized representations of blocks for comparison - const normalizedCurrentBlocks: Record = {} - const normalizedDeployedBlocks: Record = {} + const normalizedCurrentBlocks: Record = {} + const normalizedDeployedBlocks: Record = {} for (const blockId of currentBlockIds) { const currentBlock = currentState.blocks[blockId] @@ -120,8 +120,9 @@ export function hasWorkflowChanged( } // Get values with special handling for null/undefined - let currentValue = currentSubBlocks[subBlockId].value ?? null - let deployedValue = deployedSubBlocks[subBlockId].value ?? null + // Using unknown type since sanitization functions return different types + let currentValue: unknown = currentSubBlocks[subBlockId].value ?? null + let deployedValue: unknown = deployedSubBlocks[subBlockId].value ?? 
null if (subBlockId === 'tools' && Array.isArray(currentValue) && Array.isArray(deployedValue)) { currentValue = sanitizeTools(currentValue) @@ -232,8 +233,8 @@ export function hasWorkflowChanged( } // 6. Compare variables - const currentVariables = normalizeVariables((currentState as any).variables) - const deployedVariables = normalizeVariables((deployedState as any).variables) + const currentVariables = normalizeVariables(currentState.variables) + const deployedVariables = normalizeVariables(deployedState.variables) const normalizedCurrentVars = normalizeValue( Object.fromEntries(Object.entries(currentVariables).map(([id, v]) => [id, sanitizeVariable(v)])) diff --git a/apps/sim/lib/workflows/comparison/normalize.test.ts b/apps/sim/lib/workflows/comparison/normalize.test.ts index c144694564..ca22205876 100644 --- a/apps/sim/lib/workflows/comparison/normalize.test.ts +++ b/apps/sim/lib/workflows/comparison/normalize.test.ts @@ -2,6 +2,7 @@ * Tests for workflow normalization utilities */ import { describe, expect, it } from 'vitest' +import type { Loop, Parallel } from '@/stores/workflows/workflow/types' import { normalizedStringify, normalizeEdge, @@ -39,7 +40,7 @@ describe('Workflow Normalization Utilities', () => { it.concurrent('should sort object keys alphabetically', () => { const input = { zebra: 1, apple: 2, mango: 3 } - const result = normalizeValue(input) + const result = normalizeValue(input) as Record expect(Object.keys(result)).toEqual(['apple', 'mango', 'zebra']) }) @@ -55,7 +56,10 @@ describe('Workflow Normalization Utilities', () => { }, first: 'value', } - const result = normalizeValue(input) + const result = normalizeValue(input) as { + first: string + outer: { z: number; a: { y: number; b: number } } + } expect(Object.keys(result)).toEqual(['first', 'outer']) expect(Object.keys(result.outer)).toEqual(['a', 'z']) @@ -72,11 +76,11 @@ describe('Workflow Normalization Utilities', () => { it.concurrent('should handle arrays with mixed types', () => { 
const input = [1, 'string', { b: 2, a: 1 }, null, [3, 2, 1]] - const result = normalizeValue(input) + const result = normalizeValue(input) as unknown[] expect(result[0]).toBe(1) expect(result[1]).toBe('string') - expect(Object.keys(result[2])).toEqual(['a', 'b']) + expect(Object.keys(result[2] as Record)).toEqual(['a', 'b']) expect(result[3]).toBe(null) expect(result[4]).toEqual([3, 2, 1]) // Array order preserved }) @@ -94,7 +98,9 @@ describe('Workflow Normalization Utilities', () => { }, }, } - const result = normalizeValue(input) + const result = normalizeValue(input) as { + level1: { level2: { level3: { level4: { z: string; a: string } } } } + } expect(Object.keys(result.level1.level2.level3.level4)).toEqual(['a', 'z']) }) @@ -143,7 +149,7 @@ describe('Workflow Normalization Utilities', () => { }) it.concurrent('should normalize "for" loop type', () => { - const loop = { + const loop: Loop & { extraField?: string } = { id: 'loop1', nodes: ['block1', 'block2'], loopType: 'for', @@ -164,7 +170,7 @@ describe('Workflow Normalization Utilities', () => { }) it.concurrent('should normalize "forEach" loop type', () => { - const loop = { + const loop: Loop = { id: 'loop2', nodes: ['block1'], loopType: 'forEach', @@ -183,10 +189,11 @@ describe('Workflow Normalization Utilities', () => { }) it.concurrent('should normalize "while" loop type', () => { - const loop = { + const loop: Loop = { id: 'loop3', nodes: ['block1', 'block2', 'block3'], loopType: 'while', + iterations: 0, whileCondition: ' === true', doWhileCondition: 'should-be-excluded', } @@ -201,10 +208,11 @@ describe('Workflow Normalization Utilities', () => { }) it.concurrent('should normalize "doWhile" loop type', () => { - const loop = { + const loop: Loop = { id: 'loop4', nodes: ['block1'], loopType: 'doWhile', + iterations: 0, doWhileCondition: ' < 100', whileCondition: 'should-be-excluded', } @@ -218,11 +226,11 @@ describe('Workflow Normalization Utilities', () => { }) }) - it.concurrent('should handle 
unknown loop type with base fields only', () => { - const loop = { + it.concurrent('should extract only relevant fields for for loop type', () => { + const loop: Loop = { id: 'loop5', nodes: ['block1'], - loopType: 'unknown', + loopType: 'for', iterations: 5, forEachItems: 'items', } @@ -231,7 +239,8 @@ describe('Workflow Normalization Utilities', () => { expect(result).toEqual({ id: 'loop5', nodes: ['block1'], - loopType: 'unknown', + loopType: 'for', + iterations: 5, }) }) }) @@ -243,7 +252,7 @@ describe('Workflow Normalization Utilities', () => { }) it.concurrent('should normalize "count" parallel type', () => { - const parallel = { + const parallel: Parallel & { extraField?: string } = { id: 'parallel1', nodes: ['block1', 'block2'], parallelType: 'count', @@ -262,7 +271,7 @@ describe('Workflow Normalization Utilities', () => { }) it.concurrent('should normalize "collection" parallel type', () => { - const parallel = { + const parallel: Parallel = { id: 'parallel2', nodes: ['block1'], parallelType: 'collection', @@ -279,11 +288,11 @@ describe('Workflow Normalization Utilities', () => { }) }) - it.concurrent('should handle unknown parallel type with base fields only', () => { - const parallel = { + it.concurrent('should include base fields for undefined parallel type', () => { + const parallel: Parallel = { id: 'parallel3', nodes: ['block1'], - parallelType: 'unknown', + parallelType: undefined, count: 5, distribution: 'items', } @@ -292,7 +301,7 @@ describe('Workflow Normalization Utilities', () => { expect(result).toEqual({ id: 'parallel3', nodes: ['block1'], - parallelType: 'unknown', + parallelType: undefined, }) }) }) @@ -312,7 +321,7 @@ describe('Workflow Normalization Utilities', () => { const tools = [ { id: 'tool1', name: 'Search', isExpanded: true }, { id: 'tool2', name: 'Calculator', isExpanded: false }, - { id: 'tool3', name: 'Weather' }, // No isExpanded field + { id: 'tool3', name: 'Weather' }, ] const result = sanitizeTools(tools) @@ -365,7 +374,7 
@@ describe('Workflow Normalization Utilities', () => { const inputFormat = [ { id: 'input1', name: 'Name', value: 'John', collapsed: true }, { id: 'input2', name: 'Age', value: 25, collapsed: false }, - { id: 'input3', name: 'Email' }, // No value or collapsed + { id: 'input3', name: 'Email' }, ] const result = sanitizeInputFormat(inputFormat) diff --git a/apps/sim/lib/workflows/comparison/normalize.ts b/apps/sim/lib/workflows/comparison/normalize.ts index bbc60c81ae..571f201138 100644 --- a/apps/sim/lib/workflows/comparison/normalize.ts +++ b/apps/sim/lib/workflows/comparison/normalize.ts @@ -3,12 +3,15 @@ * Used by both client-side signature computation and server-side comparison. */ +import type { Edge } from 'reactflow' +import type { Loop, Parallel, Variable } from '@/stores/workflows/workflow/types' + /** * Normalizes a value for consistent comparison by sorting object keys recursively * @param value - The value to normalize * @returns A normalized version of the value with sorted keys */ -export function normalizeValue(value: any): any { +export function normalizeValue(value: unknown): unknown { if (value === null || value === undefined || typeof value !== 'object') { return value } @@ -17,9 +20,9 @@ export function normalizeValue(value: any): any { return value.map(normalizeValue) } - const sorted: Record = {} - for (const key of Object.keys(value).sort()) { - sorted[key] = normalizeValue(value[key]) + const sorted: Record = {} + for (const key of Object.keys(value as Record).sort()) { + sorted[key] = normalizeValue((value as Record)[key]) } return sorted } @@ -29,19 +32,30 @@ export function normalizeValue(value: any): any { * @param value - The value to normalize and stringify * @returns A normalized JSON string */ -export function normalizedStringify(value: any): string { +export function normalizedStringify(value: unknown): string { return JSON.stringify(normalizeValue(value)) } +/** Normalized loop result type with only essential fields */ +interface 
NormalizedLoop { + id: string + nodes: string[] + loopType: Loop['loopType'] + iterations?: number + forEachItems?: Loop['forEachItems'] + whileCondition?: string + doWhileCondition?: string +} + /** * Normalizes a loop configuration by extracting only the relevant fields for the loop type * @param loop - The loop configuration object * @returns Normalized loop with only relevant fields */ -export function normalizeLoop(loop: any): any { +export function normalizeLoop(loop: Loop | null | undefined): NormalizedLoop | null | undefined { if (!loop) return loop const { id, nodes, loopType, iterations, forEachItems, whileCondition, doWhileCondition } = loop - const base: any = { id, nodes, loopType } + const base: Pick = { id, nodes, loopType } switch (loopType) { case 'for': @@ -57,15 +71,30 @@ export function normalizeLoop(loop: any): any { } } +/** Normalized parallel result type with only essential fields */ +interface NormalizedParallel { + id: string + nodes: string[] + parallelType: Parallel['parallelType'] + count?: number + distribution?: Parallel['distribution'] +} + /** * Normalizes a parallel configuration by extracting only the relevant fields for the parallel type * @param parallel - The parallel configuration object * @returns Normalized parallel with only relevant fields */ -export function normalizeParallel(parallel: any): any { +export function normalizeParallel( + parallel: Parallel | null | undefined +): NormalizedParallel | null | undefined { if (!parallel) return parallel const { id, nodes, parallelType, count, distribution } = parallel - const base: any = { id, nodes, parallelType } + const base: Pick = { + id, + nodes, + parallelType, + } switch (parallelType) { case 'count': @@ -77,23 +106,37 @@ export function normalizeParallel(parallel: any): any { } } +/** Tool configuration with optional UI-only isExpanded field */ +type ToolWithExpanded = Record & { isExpanded?: boolean } + /** * Sanitizes tools array by removing UI-only fields like 
isExpanded * @param tools - Array of tool configurations * @returns Sanitized tools array */ -export function sanitizeTools(tools: any[] | undefined): any[] { +export function sanitizeTools(tools: unknown[] | undefined): Record[] { if (!Array.isArray(tools)) return [] - return tools.map(({ isExpanded, ...rest }) => rest) + return tools.map((tool) => { + if (tool && typeof tool === 'object' && !Array.isArray(tool)) { + const { isExpanded, ...rest } = tool as ToolWithExpanded + return rest + } + return tool as Record + }) } +/** Variable with optional UI-only validationError field */ +type VariableWithValidation = Variable & { validationError?: string } + /** * Sanitizes a variable by removing UI-only fields like validationError * @param variable - The variable object * @returns Sanitized variable object */ -export function sanitizeVariable(variable: any): any { +export function sanitizeVariable( + variable: VariableWithValidation | null | undefined +): Omit | null | undefined { if (!variable || typeof variable !== 'object') return variable const { validationError, ...rest } = variable return rest @@ -105,21 +148,38 @@ export function sanitizeVariable(variable: any): any { * @param variables - The variables to normalize * @returns A normalized variables object */ -export function normalizeVariables(variables: any): Record { +export function normalizeVariables(variables: unknown): Record { if (!variables) return {} if (Array.isArray(variables)) return {} if (typeof variables !== 'object') return {} - return variables + return variables as Record } +/** Input format item with optional UI-only fields */ +type InputFormatItem = Record & { value?: unknown; collapsed?: boolean } + /** * Sanitizes inputFormat array by removing UI-only fields like value and collapsed * @param inputFormat - Array of input format configurations * @returns Sanitized input format array */ -export function sanitizeInputFormat(inputFormat: any[] | undefined): any[] { +export function 
sanitizeInputFormat(inputFormat: unknown[] | undefined): Record[] { if (!Array.isArray(inputFormat)) return [] - return inputFormat.map(({ value, collapsed, ...rest }) => rest) + return inputFormat.map((item) => { + if (item && typeof item === 'object' && !Array.isArray(item)) { + const { value, collapsed, ...rest } = item as InputFormatItem + return rest + } + return item as Record + }) +} + +/** Normalized edge with only connection-relevant fields */ +interface NormalizedEdge { + source: string + sourceHandle?: string | null + target: string + targetHandle?: string | null } /** @@ -127,12 +187,7 @@ export function sanitizeInputFormat(inputFormat: any[] | undefined): any[] { * @param edge - The edge object * @returns Normalized edge with only connection fields */ -export function normalizeEdge(edge: any): { - source: string - sourceHandle?: string - target: string - targetHandle?: string -} { +export function normalizeEdge(edge: Edge): NormalizedEdge { return { source: edge.source, sourceHandle: edge.sourceHandle, @@ -147,8 +202,18 @@ export function normalizeEdge(edge: any): { * @returns Sorted array of normalized edges */ export function sortEdges( - edges: Array<{ source: string; sourceHandle?: string; target: string; targetHandle?: string }> -): Array<{ source: string; sourceHandle?: string; target: string; targetHandle?: string }> { + edges: Array<{ + source: string + sourceHandle?: string | null + target: string + targetHandle?: string | null + }> +): Array<{ + source: string + sourceHandle?: string | null + target: string + targetHandle?: string | null +}> { return [...edges].sort((a, b) => `${a.source}-${a.sourceHandle}-${a.target}-${a.targetHandle}`.localeCompare( `${b.source}-${b.sourceHandle}-${b.target}-${b.targetHandle}` diff --git a/apps/sim/lib/workflows/credentials/credential-extractor.ts b/apps/sim/lib/workflows/credentials/credential-extractor.ts index 014febabc5..2fb757ba49 100644 --- a/apps/sim/lib/workflows/credentials/credential-extractor.ts 
+++ b/apps/sim/lib/workflows/credentials/credential-extractor.ts @@ -1,6 +1,15 @@ import { getBlock } from '@/blocks/registry' import type { SubBlockConfig } from '@/blocks/types' import { AuthMode } from '@/blocks/types' +import type { BlockState, SubBlockState, WorkflowState } from '@/stores/workflows/workflow/types' + +/** Condition type for SubBlock visibility - mirrors the inline type from blocks/types.ts */ +interface SubBlockCondition { + field: string + value: string | number | boolean | Array | undefined + not?: boolean + and?: SubBlockCondition +} // Credential types based on actual patterns in the codebase export enum CredentialType { @@ -48,7 +57,9 @@ const WORKSPACE_SPECIFIC_FIELDS = new Set([ * Extract required credentials from a workflow state * This analyzes all blocks and their subblocks to identify credential requirements */ -export function extractRequiredCredentials(state: any): CredentialRequirement[] { +export function extractRequiredCredentials( + state: Partial | null | undefined +): CredentialRequirement[] { const credentials: CredentialRequirement[] = [] const seen = new Set() @@ -57,7 +68,7 @@ export function extractRequiredCredentials(state: any): CredentialRequirement[] } // Process each block - Object.values(state.blocks).forEach((block: any) => { + Object.values(state.blocks).forEach((block: BlockState) => { if (!block?.type) return const blockConfig = getBlock(block.type) @@ -104,8 +115,8 @@ export function extractRequiredCredentials(state: any): CredentialRequirement[] }) }) - // Helper to check visibility, respecting mode and conditions - function isSubBlockVisible(block: any, subBlockConfig: SubBlockConfig): boolean { + /** Helper to check visibility, respecting mode and conditions */ + function isSubBlockVisible(block: BlockState, subBlockConfig: SubBlockConfig): boolean { const mode = subBlockConfig.mode ?? 
'both' if (mode === 'trigger' && !block?.triggerMode) return false if (mode === 'basic' && block?.advancedMode) return false @@ -118,7 +129,7 @@ export function extractRequiredCredentials(state: any): CredentialRequirement[] ? subBlockConfig.condition() : subBlockConfig.condition - const evaluate = (cond: any): boolean => { + const evaluate = (cond: SubBlockCondition): boolean => { const currentValue = block?.subBlocks?.[cond.field]?.value const expected = cond.value @@ -126,7 +137,7 @@ export function extractRequiredCredentials(state: any): CredentialRequirement[] expected === undefined ? true : Array.isArray(expected) - ? expected.includes(currentValue) + ? expected.includes(currentValue as string) : currentValue === expected if (cond.not) match = !match @@ -161,6 +172,12 @@ function formatFieldName(fieldName: string): string { .join(' ') } +/** Block state with mutable subBlocks for sanitization */ +interface MutableBlockState extends Omit { + subBlocks: Record + data?: Record +} + /** * Remove malformed subBlocks from a block that may have been created by bugs. 
* This includes subBlocks with: @@ -168,12 +185,12 @@ function formatFieldName(fieldName: string): string { * - Missing required `id` field * - Type "unknown" (indicates malformed data) */ -function removeMalformedSubBlocks(block: any): void { +function removeMalformedSubBlocks(block: MutableBlockState): void { if (!block.subBlocks) return const keysToRemove: string[] = [] - Object.entries(block.subBlocks).forEach(([key, subBlock]: [string, any]) => { + Object.entries(block.subBlocks).forEach(([key, subBlock]) => { // Flag subBlocks with invalid keys (literal "undefined" string) if (key === 'undefined') { keysToRemove.push(key) @@ -187,7 +204,8 @@ function removeMalformedSubBlocks(block: any): void { } // Flag subBlocks with type "unknown" (malformed data) - if (subBlock.type === 'unknown') { + // Cast to string for comparison since SubBlockType doesn't include 'unknown' + if ((subBlock.type as string) === 'unknown') { keysToRemove.push(key) return } @@ -204,6 +222,12 @@ function removeMalformedSubBlocks(block: any): void { }) } +/** Sanitized workflow state structure */ +interface SanitizedWorkflowState { + blocks?: Record + [key: string]: unknown +} + /** * Sanitize workflow state by removing all credentials and workspace-specific data * This is used for both template creation and workflow export to ensure consistency @@ -212,18 +236,18 @@ function removeMalformedSubBlocks(block: any): void { * @param options - Options for sanitization behavior */ export function sanitizeWorkflowForSharing( - state: any, + state: Partial | null | undefined, options: { preserveEnvVars?: boolean // Keep {{VAR}} references for export } = {} -): any { - const sanitized = JSON.parse(JSON.stringify(state)) // Deep clone +): SanitizedWorkflowState { + const sanitized = JSON.parse(JSON.stringify(state)) as SanitizedWorkflowState // Deep clone if (!sanitized?.blocks) { return sanitized } - Object.values(sanitized.blocks).forEach((block: any) => { + 
Object.values(sanitized.blocks).forEach((block: MutableBlockState) => { if (!block?.type) return // First, remove any malformed subBlocks that may have been created by bugs @@ -239,7 +263,7 @@ export function sanitizeWorkflowForSharing( // Clear OAuth credentials (type: 'oauth-input') if (subBlockConfig.type === 'oauth-input') { - block.subBlocks[subBlockConfig.id].value = null + block.subBlocks[subBlockConfig.id]!.value = null } // Clear secret fields (password: true) @@ -247,24 +271,24 @@ export function sanitizeWorkflowForSharing( // Preserve environment variable references if requested if ( options.preserveEnvVars && - typeof subBlock.value === 'string' && + typeof subBlock?.value === 'string' && subBlock.value.startsWith('{{') && subBlock.value.endsWith('}}') ) { // Keep the env var reference } else { - block.subBlocks[subBlockConfig.id].value = null + block.subBlocks[subBlockConfig.id]!.value = null } } // Clear workspace-specific selectors else if (WORKSPACE_SPECIFIC_TYPES.has(subBlockConfig.type)) { - block.subBlocks[subBlockConfig.id].value = null + block.subBlocks[subBlockConfig.id]!.value = null } // Clear workspace-specific fields by ID else if (WORKSPACE_SPECIFIC_FIELDS.has(subBlockConfig.id)) { - block.subBlocks[subBlockConfig.id].value = null + block.subBlocks[subBlockConfig.id]!.value = null } } }) @@ -272,9 +296,9 @@ export function sanitizeWorkflowForSharing( // Process subBlocks without config (fallback) if (block.subBlocks) { - Object.entries(block.subBlocks).forEach(([key, subBlock]: [string, any]) => { + Object.entries(block.subBlocks).forEach(([key, subBlock]) => { // Clear workspace-specific fields by key name - if (WORKSPACE_SPECIFIC_FIELDS.has(key)) { + if (WORKSPACE_SPECIFIC_FIELDS.has(key) && subBlock) { subBlock.value = null } }) @@ -282,14 +306,14 @@ export function sanitizeWorkflowForSharing( // Clear data field (for backward compatibility) if (block.data) { - Object.entries(block.data).forEach(([key, value]: [string, any]) => { + 
Object.entries(block.data).forEach(([key]) => { // Clear anything that looks like credentials if (/credential|oauth|api[_-]?key|token|secret|auth|password|bearer/i.test(key)) { - block.data[key] = null + block.data![key] = null } // Clear workspace-specific data if (WORKSPACE_SPECIFIC_FIELDS.has(key)) { - block.data[key] = null + block.data![key] = null } }) } @@ -302,7 +326,9 @@ export function sanitizeWorkflowForSharing( * Sanitize workflow state for templates (removes credentials and workspace data) * Wrapper for backward compatibility */ -export function sanitizeCredentials(state: any): any { +export function sanitizeCredentials( + state: Partial | null | undefined +): SanitizedWorkflowState { return sanitizeWorkflowForSharing(state, { preserveEnvVars: false }) } @@ -310,6 +336,8 @@ export function sanitizeCredentials(state: any): any { * Sanitize workflow state for export (preserves env vars) * Convenience wrapper for workflow export */ -export function sanitizeForExport(state: any): any { +export function sanitizeForExport( + state: Partial | null | undefined +): SanitizedWorkflowState { return sanitizeWorkflowForSharing(state, { preserveEnvVars: true }) } diff --git a/apps/sim/lib/workflows/diff/diff-engine.ts b/apps/sim/lib/workflows/diff/diff-engine.ts index 3efb6831ae..f22365d145 100644 --- a/apps/sim/lib/workflows/diff/diff-engine.ts +++ b/apps/sim/lib/workflows/diff/diff-engine.ts @@ -245,10 +245,10 @@ function computeFieldDiff( const unchangedFields: string[] = [] // Check basic fields - const fieldsToCheck = ['type', 'name', 'enabled', 'triggerMode', 'horizontalHandles'] + const fieldsToCheck = ['type', 'name', 'enabled', 'triggerMode', 'horizontalHandles'] as const for (const field of fieldsToCheck) { - const currentValue = (currentBlock as any)[field] - const proposedValue = (proposedBlock as any)[field] + const currentValue = currentBlock[field] + const proposedValue = proposedBlock[field] if (JSON.stringify(currentValue) !== 
JSON.stringify(proposedValue)) { changedFields.push(field) } else if (currentValue !== undefined) { @@ -363,7 +363,7 @@ export class WorkflowDiffEngine { } // Call the API route to create the diff - const body: any = { + const body: Record = { jsonContent, currentWorkflowState: mergedBaseline, } @@ -859,7 +859,7 @@ export class WorkflowDiffEngine { const proposedEdgeSet = new Set() // Create edge identifiers for current state (using sim-agent format) - mergedBaseline.edges.forEach((edge: any) => { + mergedBaseline.edges.forEach((edge: Edge) => { const edgeId = `${edge.source}-${edge.sourceHandle || 'source'}-${edge.target}-${edge.targetHandle || 'target'}` currentEdgeSet.add(edgeId) }) @@ -992,7 +992,7 @@ export class WorkflowDiffEngine { } // Call the API route to merge the diff - const body: any = { + const body: Record = { existingDiff: this.currentDiff, jsonContent, } diff --git a/apps/sim/lib/workflows/executor/execute-workflow.ts b/apps/sim/lib/workflows/executor/execute-workflow.ts index b16e6ea820..ce6f4c2c0d 100644 --- a/apps/sim/lib/workflows/executor/execute-workflow.ts +++ b/apps/sim/lib/workflows/executor/execute-workflow.ts @@ -5,6 +5,7 @@ import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core' import { PauseResumeManager } from '@/lib/workflows/executor/human-in-the-loop-manager' import { ExecutionSnapshot } from '@/executor/execution/snapshot' import type { ExecutionMetadata } from '@/executor/execution/types' +import type { ExecutionResult, StreamingExecution } from '@/executor/types' const logger = createLogger('WorkflowExecution') @@ -13,8 +14,8 @@ export interface ExecuteWorkflowOptions { selectedOutputs?: string[] isSecureMode?: boolean workflowTriggerType?: 'api' | 'chat' - onStream?: (streamingExec: any) => Promise - onBlockComplete?: (blockId: string, output: any) => Promise + onStream?: (streamingExec: StreamingExecution) => Promise + onBlockComplete?: (blockId: string, output: unknown) => Promise skipLoggingComplete?: 
boolean } @@ -29,11 +30,11 @@ export interface WorkflowInfo { export async function executeWorkflow( workflow: WorkflowInfo, requestId: string, - input: any | undefined, + input: unknown | undefined, actorUserId: string, streamConfig?: ExecuteWorkflowOptions, providedExecutionId?: string -): Promise { +): Promise { if (!workflow.workspaceId) { throw new Error(`Workflow ${workflow.id} has no workspaceId`) } @@ -71,7 +72,7 @@ export async function executeWorkflow( callbacks: { onStream: streamConfig?.onStream, onBlockComplete: streamConfig?.onBlockComplete - ? async (blockId: string, _blockName: string, _blockType: string, output: any) => { + ? async (blockId: string, _blockName: string, _blockType: string, output: unknown) => { await streamConfig.onBlockComplete!(blockId, output) } : undefined, @@ -119,7 +120,7 @@ export async function executeWorkflow( } return result - } catch (error: any) { + } catch (error: unknown) { logger.error(`[${requestId}] Workflow execution failed:`, error) throw error } diff --git a/apps/sim/lib/workflows/executor/execution-core.ts b/apps/sim/lib/workflows/executor/execution-core.ts index 9e81d8711a..0eeb946542 100644 --- a/apps/sim/lib/workflows/executor/execution-core.ts +++ b/apps/sim/lib/workflows/executor/execution-core.ts @@ -19,8 +19,12 @@ import { updateWorkflowRunCounts } from '@/lib/workflows/utils' import { Executor } from '@/executor' import { REFERENCE } from '@/executor/constants' import type { ExecutionSnapshot } from '@/executor/execution/snapshot' -import type { ExecutionCallbacks, IterationContext } from '@/executor/execution/types' -import type { ExecutionResult } from '@/executor/types' +import type { + ContextExtensions, + ExecutionCallbacks, + IterationContext, +} from '@/executor/execution/types' +import type { ExecutionResult, NormalizedBlockOutput } from '@/executor/types' import { createEnvVarPattern } from '@/executor/utils/reference-validation' import { Serializer } from '@/serializer' import { 
mergeSubblockState } from '@/stores/workflows/server-utils' @@ -41,7 +45,7 @@ export interface ExecuteWorkflowCoreOptions { abortSignal?: AbortSignal } -function parseVariableValueByType(value: any, type: string): any { +function parseVariableValueByType(value: unknown, type: string): unknown { if (value === null || value === undefined) { switch (type) { case 'number': @@ -262,7 +266,7 @@ export async function executeWorkflowCore( const filteredEdges = edges // Check if this is a resume execution before trigger resolution - const resumeFromSnapshot = (metadata as any).resumeFromSnapshot === true + const resumeFromSnapshot = metadata.resumeFromSnapshot === true const resumePendingQueue = snapshot.state?.pendingQueue let resolvedTriggerBlockId = triggerBlockId @@ -321,7 +325,7 @@ export async function executeWorkflowCore( blockId: string, blockName: string, blockType: string, - output: any, + output: { input?: unknown; output: NormalizedBlockOutput; executionTime: number }, iterationContext?: IterationContext ) => { await loggingSession.onBlockComplete(blockId, blockName, blockType, output) @@ -330,7 +334,7 @@ export async function executeWorkflowCore( } } - const contextExtensions: any = { + const contextExtensions: ContextExtensions = { stream: !!onStream, selectedOutputs, executionId, @@ -342,7 +346,12 @@ export async function executeWorkflowCore( onStream, resumeFromSnapshot, resumePendingQueue, - remainingEdges: snapshot.state?.remainingEdges, + remainingEdges: snapshot.state?.remainingEdges?.map((edge) => ({ + source: edge.source, + target: edge.target, + sourceHandle: edge.sourceHandle ?? undefined, + targetHandle: edge.targetHandle ?? 
undefined, + })), dagIncomingEdges: snapshot.state?.dagIncomingEdges, snapshotState: snapshot.state, metadata, @@ -363,7 +372,7 @@ export async function executeWorkflowCore( // Convert initial workflow variables to their native types if (workflowVariables) { for (const [varId, variable] of Object.entries(workflowVariables)) { - const v = variable as any + const v = variable as { value?: unknown; type?: string } if (v.value !== undefined && v.type) { v.value = parseVariableValueByType(v.value, v.type) } @@ -432,18 +441,23 @@ export async function executeWorkflowCore( }) return result - } catch (error: any) { + } catch (error: unknown) { logger.error(`[${requestId}] Execution failed:`, error) - const executionResult = (error as any)?.executionResult + const errorWithResult = error as { + executionResult?: ExecutionResult + message?: string + stack?: string + } + const executionResult = errorWithResult?.executionResult const { traceSpans } = executionResult ? buildTraceSpans(executionResult) : { traceSpans: [] } await loggingSession.safeCompleteWithError({ endedAt: new Date().toISOString(), totalDurationMs: executionResult?.metadata?.duration || 0, error: { - message: error.message || 'Execution failed', - stackTrace: error.stack, + message: errorWithResult?.message || 'Execution failed', + stackTrace: errorWithResult?.stack, }, traceSpans, }) diff --git a/apps/sim/lib/workflows/executor/human-in-the-loop-manager.ts b/apps/sim/lib/workflows/executor/human-in-the-loop-manager.ts index 5c10de594b..f695e8dc69 100644 --- a/apps/sim/lib/workflows/executor/human-in-the-loop-manager.ts +++ b/apps/sim/lib/workflows/executor/human-in-the-loop-manager.ts @@ -2,13 +2,14 @@ import { randomUUID } from 'crypto' import { db } from '@sim/db' import { pausedExecutions, resumeQueue, workflowExecutionLogs } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, asc, desc, eq, inArray, lt, sql } from 'drizzle-orm' +import { and, asc, desc, eq, inArray, lt, type 
SQL, sql } from 'drizzle-orm' import type { Edge } from 'reactflow' import { preprocessExecution } from '@/lib/execution/preprocessing' import { LoggingSession } from '@/lib/logs/execution/logging-session' import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core' import { ExecutionSnapshot } from '@/executor/execution/snapshot' import type { ExecutionResult, PausePoint, SerializedSnapshot } from '@/executor/types' +import type { SerializedConnection } from '@/serializer/types' const logger = createLogger('HumanInTheLoopManager') @@ -18,7 +19,7 @@ interface ResumeQueueEntrySummary { parentExecutionId: string newExecutionId: string contextId: string - resumeInput: any + resumeInput: unknown status: string queuedAt: string | null claimedAt: string | null @@ -69,7 +70,7 @@ interface PersistPauseResultArgs { interface EnqueueResumeArgs { executionId: string contextId: string - resumeInput: any + resumeInput: unknown userId: string } @@ -85,7 +86,7 @@ type EnqueueResumeResult = resumeEntryId: string pausedExecution: typeof pausedExecutions.$inferSelect contextId: string - resumeInput: any + resumeInput: unknown userId: string } @@ -94,7 +95,7 @@ interface StartResumeExecutionArgs { resumeExecutionId: string pausedExecution: typeof pausedExecutions.$inferSelect contextId: string - resumeInput: any + resumeInput: unknown userId: string } @@ -365,7 +366,7 @@ export class PauseResumeManager { resumeExecutionId: string pausedExecution: typeof pausedExecutions.$inferSelect contextId: string - resumeInput: any + resumeInput: unknown userId: string }): Promise { const { resumeExecutionId, pausedExecution, contextId, resumeInput, userId } = args @@ -408,9 +409,8 @@ export class PauseResumeManager { const rawPauseBlockId = pausePoint.blockId ?? 
contextId const pauseBlockId = PauseResumeManager.normalizePauseBlockId(rawPauseBlockId) - const dagIncomingEdgesFromSnapshot: Record | undefined = ( - baseSnapshot.state as any - )?.dagIncomingEdges + const dagIncomingEdgesFromSnapshot: Record | undefined = + baseSnapshot.state?.dagIncomingEdges const downstreamBlocks = dagIncomingEdgesFromSnapshot ? Object.entries(dagIncomingEdgesFromSnapshot) @@ -424,9 +424,10 @@ export class PauseResumeManager { .map(([nodeId]) => nodeId) : baseSnapshot.workflow.connections .filter( - (conn: any) => PauseResumeManager.normalizePauseBlockId(conn.source) === pauseBlockId + (conn: SerializedConnection) => + PauseResumeManager.normalizePauseBlockId(conn.source) === pauseBlockId ) - .map((conn: any) => conn.target) + .map((conn: SerializedConnection) => conn.target) logger.info('Found downstream blocks', { pauseBlockId, @@ -448,7 +449,7 @@ export class PauseResumeManager { if (stateCopy) { const dagIncomingEdges: Record | undefined = - (stateCopy as any)?.dagIncomingEdges || dagIncomingEdgesFromSnapshot + stateCopy.dagIncomingEdges || dagIncomingEdgesFromSnapshot // Calculate the pause duration (time from pause to resume) const pauseDurationMs = pausedExecution.pausedAt @@ -617,11 +618,11 @@ export class PauseResumeManager { // If we didn't find any edges via the DAG snapshot, fall back to workflow connections if (edgesToRemove.length === 0 && baseSnapshot.workflow.connections?.length) { edgesToRemove = baseSnapshot.workflow.connections - .filter((conn: any) => + .filter((conn: SerializedConnection) => completedPauseContexts.has(PauseResumeManager.normalizePauseBlockId(conn.source)) ) - .map((conn: any) => ({ - id: conn.id ?? 
`${conn.source}→${conn.target}`, + .map((conn: SerializedConnection) => ({ + id: `${conn.source}→${conn.target}`, source: conn.source, target: conn.target, sourceHandle: conn.sourceHandle, @@ -630,11 +631,11 @@ export class PauseResumeManager { } } else { edgesToRemove = baseSnapshot.workflow.connections - .filter((conn: any) => + .filter((conn: SerializedConnection) => completedPauseContexts.has(PauseResumeManager.normalizePauseBlockId(conn.source)) ) - .map((conn: any) => ({ - id: conn.id ?? `${conn.source}→${conn.target}`, + .map((conn: SerializedConnection) => ({ + id: `${conn.source}→${conn.target}`, source: conn.source, target: conn.target, sourceHandle: conn.sourceHandle, @@ -913,7 +914,7 @@ export class PauseResumeManager { }): Promise { const { workflowId, status } = options - let whereClause: any = eq(pausedExecutions.workflowId, workflowId) + let whereClause: SQL | undefined = eq(pausedExecutions.workflowId, workflowId) if (status) { const statuses = Array.isArray(status) @@ -924,7 +925,7 @@ export class PauseResumeManager { if (statuses.length === 1) { whereClause = and(whereClause, eq(pausedExecutions.status, statuses[0])) } else if (statuses.length > 1) { - whereClause = and(whereClause, inArray(pausedExecutions.status, statuses as any)) + whereClause = and(whereClause, inArray(pausedExecutions.status, statuses)) } } @@ -1129,16 +1130,16 @@ export class PauseResumeManager { } private static mapPausePoints( - pausePoints: any, + pausePoints: unknown, queuePositions?: Map, latestEntries?: Map ): PausePointWithQueue[] { - const record = pausePoints as Record + const record = pausePoints as Record | null if (!record) { return [] } - return Object.values(record).map((point: any) => { + return Object.values(record).map((point: PausePoint) => { const queuePosition = queuePositions?.get(point.contextId ?? '') ?? null const latestEntry = latestEntries?.get(point.contextId ?? 
'') diff --git a/apps/sim/lib/workflows/operations/import-export.ts b/apps/sim/lib/workflows/operations/import-export.ts index f5dbd52a92..b446ea1083 100644 --- a/apps/sim/lib/workflows/operations/import-export.ts +++ b/apps/sim/lib/workflows/operations/import-export.ts @@ -1,7 +1,7 @@ import { createLogger } from '@sim/logger' import JSZip from 'jszip' import { sanitizeForExport } from '@/lib/workflows/sanitization/json-sanitizer' -import type { WorkflowState } from '@/stores/workflows/workflow/types' +import type { Variable, WorkflowState } from '@/stores/workflows/workflow/types' const logger = createLogger('WorkflowImportExport') @@ -14,12 +14,7 @@ export interface WorkflowExportData { folderId?: string | null } state: WorkflowState - variables?: Array<{ - id: string - name: string - type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'plain' - value: any - }> + variables?: Record } export interface FolderExportData { diff --git a/apps/sim/lib/workflows/persistence/utils.ts b/apps/sim/lib/workflows/persistence/utils.ts index b115321202..d6ccaa90f9 100644 --- a/apps/sim/lib/workflows/persistence/utils.ts +++ b/apps/sim/lib/workflows/persistence/utils.ts @@ -9,7 +9,7 @@ import { workflowSubflows, } from '@sim/db' import { createLogger } from '@sim/logger' -import type { InferSelectModel } from 'drizzle-orm' +import type { InferInsertModel, InferSelectModel } from 'drizzle-orm' import { and, desc, eq, sql } from 'drizzle-orm' import type { Edge } from 'reactflow' import { v4 as uuidv4 } from 'uuid' @@ -22,6 +22,8 @@ import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/w const logger = createLogger('WorkflowDBHelpers') export type WorkflowDeploymentVersion = InferSelectModel +type WebhookRecord = InferSelectModel +type SubflowInsert = InferInsertModel export interface WorkflowDeploymentVersionResponse { id: string @@ -43,7 +45,7 @@ export interface NormalizedWorkflowData { export interface DeployedWorkflowData extends 
NormalizedWorkflowData { deploymentVersionId: string - variables?: Record + variables?: Record } export async function blockExistsInDeployment( @@ -96,7 +98,7 @@ export async function loadDeployedWorkflowState(workflowId: string): Promise } + const state = active.state as WorkflowState & { variables?: Record } return { blocks: state.blocks || {}, @@ -336,7 +338,7 @@ export async function saveWorkflowToNormalizedTables( // Start a transaction await db.transaction(async (tx) => { // Snapshot existing webhooks before deletion to preserve them through the cycle - let existingWebhooks: any[] = [] + let existingWebhooks: WebhookRecord[] = [] try { existingWebhooks = await tx.select().from(webhook).where(eq(webhook.workflowId, workflowId)) } catch (webhookError) { @@ -392,7 +394,7 @@ export async function saveWorkflowToNormalizedTables( } // Insert subflows (loops and parallels) - const subflowInserts: any[] = [] + const subflowInserts: SubflowInsert[] = [] // Add loops Object.values(canonicalLoops).forEach((loop) => { @@ -571,7 +573,7 @@ export async function deployWorkflow(params: { const blockTypeCounts: Record = {} for (const block of Object.values(currentState.blocks)) { - const blockType = (block as any).type || 'unknown' + const blockType = block.type || 'unknown' blockTypeCounts[blockType] = (blockTypeCounts[blockType] || 0) + 1 } @@ -605,11 +607,33 @@ export async function deployWorkflow(params: { } } +/** Input state for ID regeneration - partial to handle external sources */ +export interface RegenerateStateInput { + blocks?: Record + edges?: Edge[] + loops?: Record + parallels?: Record + lastSaved?: number + variables?: Record + metadata?: Record +} + +/** Output state after ID regeneration */ +interface RegenerateStateOutput { + blocks: Record + edges: Edge[] + loops: Record + parallels: Record + lastSaved: number + variables?: Record + metadata?: Record +} + /** * Regenerates all IDs in a workflow state to avoid conflicts when duplicating or using templates 
* Returns a new state with all IDs regenerated and references updated */ -export function regenerateWorkflowStateIds(state: any): any { +export function regenerateWorkflowStateIds(state: RegenerateStateInput): RegenerateStateOutput { // Create ID mappings const blockIdMapping = new Map() const edgeIdMapping = new Map() @@ -624,7 +648,7 @@ export function regenerateWorkflowStateIds(state: any): any { // Map edge IDs - ;(state.edges || []).forEach((edge: any) => { + ;(state.edges || []).forEach((edge: Edge) => { edgeIdMapping.set(edge.id, crypto.randomUUID()) }) @@ -639,28 +663,28 @@ export function regenerateWorkflowStateIds(state: any): any { }) // Second pass: Create new state with regenerated IDs and updated references - const newBlocks: Record = {} - const newEdges: any[] = [] - const newLoops: Record = {} - const newParallels: Record = {} + const newBlocks: Record = {} + const newEdges: Edge[] = [] + const newLoops: Record = {} + const newParallels: Record = {} // Regenerate blocks with updated references - Object.entries(state.blocks || {}).forEach(([oldId, block]: [string, any]) => { + Object.entries(state.blocks || {}).forEach(([oldId, block]) => { const newId = blockIdMapping.get(oldId)! 
- const newBlock = { ...block, id: newId } + const newBlock: BlockState = { ...block, id: newId } // Update parentId reference if it exists if (newBlock.data?.parentId) { const newParentId = blockIdMapping.get(newBlock.data.parentId) if (newParentId) { - newBlock.data.parentId = newParentId + newBlock.data = { ...newBlock.data, parentId: newParentId } } } // Update any block references in subBlocks if (newBlock.subBlocks) { - const updatedSubBlocks: Record = {} - Object.entries(newBlock.subBlocks).forEach(([subId, subBlock]: [string, any]) => { + const updatedSubBlocks: Record = {} + Object.entries(newBlock.subBlocks).forEach(([subId, subBlock]) => { const updatedSubBlock = { ...subBlock } // If subblock value contains block references, update them @@ -668,7 +692,7 @@ export function regenerateWorkflowStateIds(state: any): any { typeof updatedSubBlock.value === 'string' && blockIdMapping.has(updatedSubBlock.value) ) { - updatedSubBlock.value = blockIdMapping.get(updatedSubBlock.value) + updatedSubBlock.value = blockIdMapping.get(updatedSubBlock.value) ?? updatedSubBlock.value } updatedSubBlocks[subId] = updatedSubBlock @@ -681,7 +705,7 @@ export function regenerateWorkflowStateIds(state: any): any { // Regenerate edges with updated source/target references - ;(state.edges || []).forEach((edge: any) => { + ;(state.edges || []).forEach((edge: Edge) => { const newId = edgeIdMapping.get(edge.id)! const newSource = blockIdMapping.get(edge.source) || edge.source const newTarget = blockIdMapping.get(edge.target) || edge.target @@ -695,9 +719,9 @@ export function regenerateWorkflowStateIds(state: any): any { }) // Regenerate loops with updated node references - Object.entries(state.loops || {}).forEach(([oldId, loop]: [string, any]) => { + Object.entries(state.loops || {}).forEach(([oldId, loop]) => { const newId = loopIdMapping.get(oldId)! 
- const newLoop = { ...loop, id: newId } + const newLoop: Loop = { ...loop, id: newId } // Update nodes array with new block IDs if (newLoop.nodes) { @@ -708,9 +732,9 @@ export function regenerateWorkflowStateIds(state: any): any { }) // Regenerate parallels with updated node references - Object.entries(state.parallels || {}).forEach(([oldId, parallel]: [string, any]) => { + Object.entries(state.parallels || {}).forEach(([oldId, parallel]) => { const newId = parallelIdMapping.get(oldId)! - const newParallel = { ...parallel, id: newId } + const newParallel: Parallel = { ...parallel, id: newId } // Update nodes array with new block IDs if (newParallel.nodes) { diff --git a/apps/sim/lib/workflows/sanitization/json-sanitizer.ts b/apps/sim/lib/workflows/sanitization/json-sanitizer.ts index eb062599f0..8ee5b01957 100644 --- a/apps/sim/lib/workflows/sanitization/json-sanitizer.ts +++ b/apps/sim/lib/workflows/sanitization/json-sanitizer.ts @@ -59,26 +59,36 @@ export interface ExportWorkflowState { id: string name: string type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'plain' - value: any + value: unknown }> } } +/** Condition structure for sanitization */ +interface SanitizedCondition { + id: string + title: string + value: string +} + /** * Sanitize condition blocks by removing UI-specific metadata * Returns cleaned JSON string (not parsed array) */ function sanitizeConditions(conditionsJson: string): string { try { - const conditions = JSON.parse(conditionsJson) + const conditions: unknown = JSON.parse(conditionsJson) if (!Array.isArray(conditions)) return conditionsJson // Keep only id, title, and value - remove UI state - const cleaned = conditions.map((cond: any) => ({ - id: cond.id, - title: cond.title, - value: cond.value || '', - })) + const cleaned: SanitizedCondition[] = conditions.map((cond: unknown) => { + const condition = cond as Record + return { + id: String(condition.id ?? ''), + title: String(condition.title ?? 
''), + value: String(condition.value ?? ''), + } + }) return JSON.stringify(cleaned) } catch { @@ -86,11 +96,50 @@ function sanitizeConditions(conditionsJson: string): string { } } +/** Tool input structure for sanitization */ +interface ToolInput { + type: string + customToolId?: string + schema?: { + type?: string + function?: { + name: string + description?: string + parameters?: unknown + } + } + code?: string + title?: string + toolId?: string + usageControl?: string + isExpanded?: boolean + [key: string]: unknown +} + +/** Sanitized tool output structure */ +interface SanitizedTool { + type: string + customToolId?: string + usageControl?: string + title?: string + toolId?: string + schema?: { + type: string + function: { + name: string + description?: string + parameters?: unknown + } + } + code?: string + [key: string]: unknown +} + /** * Sanitize tools array by removing UI state and redundant fields */ -function sanitizeTools(tools: any[]): any[] { - return tools.map((tool) => { +function sanitizeTools(tools: ToolInput[]): SanitizedTool[] { + return tools.map((tool): SanitizedTool => { if (tool.type === 'custom-tool') { // New reference format: minimal fields only if (tool.customToolId && !tool.schema && !tool.code) { @@ -102,7 +151,7 @@ function sanitizeTools(tools: any[]): any[] { } // Legacy inline format: include all fields - const sanitized: any = { + const sanitized: SanitizedTool = { type: tool.type, title: tool.title, toolId: tool.toolId, @@ -129,23 +178,24 @@ function sanitizeTools(tools: any[]): any[] { return sanitized } - const { isExpanded, ...cleanTool } = tool - return cleanTool + const { isExpanded: _isExpanded, ...cleanTool } = tool + return cleanTool as SanitizedTool }) } /** * Sort object keys recursively for consistent comparison */ -function sortKeysRecursively(item: any): any { +function sortKeysRecursively(item: unknown): unknown { if (Array.isArray(item)) { return item.map(sortKeysRecursively) } if (item !== null && typeof item === 
'object') { - return Object.keys(item) + const obj = item as Record + return Object.keys(obj) .sort() - .reduce((result: any, key: string) => { - result[key] = sortKeysRecursively(item[key]) + .reduce((result: Record, key: string) => { + result[key] = sortKeysRecursively(obj[key]) return result }, {}) } @@ -183,7 +233,7 @@ function sanitizeSubBlocks( // Sort keys for consistent comparison if (obj && typeof obj === 'object') { - sanitized[key] = sortKeysRecursively(obj) + sanitized[key] = sortKeysRecursively(obj) as Record return } } catch { @@ -201,7 +251,7 @@ function sanitizeSubBlocks( } if (key === 'tools' && Array.isArray(subBlock.value)) { - sanitized[key] = sanitizeTools(subBlock.value) + sanitized[key] = sanitizeTools(subBlock.value as unknown as ToolInput[]) return } @@ -383,7 +433,7 @@ export function sanitizeForExport(state: WorkflowState): ExportWorkflowState { // Use unified sanitization with env var preservation for export const sanitizedState = sanitizeWorkflowForSharing(fullState, { preserveEnvVars: true, // Keep {{ENV_VAR}} references in exported workflows - }) + }) as ExportWorkflowState['state'] return { version: '1.0', diff --git a/apps/sim/lib/workflows/sanitization/validation.ts b/apps/sim/lib/workflows/sanitization/validation.ts index 75e9ef5639..4c25d19981 100644 --- a/apps/sim/lib/workflows/sanitization/validation.ts +++ b/apps/sim/lib/workflows/sanitization/validation.ts @@ -1,20 +1,40 @@ import { createLogger } from '@sim/logger' import { getBlock } from '@/blocks/registry' import { isCustomTool, isMcpTool } from '@/executor/constants' -import type { WorkflowState } from '@/stores/workflows/workflow/types' +import type { BlockState, WorkflowState } from '@/stores/workflows/workflow/types' import { getTool } from '@/tools/utils' const logger = createLogger('WorkflowValidation') +/** Tool structure for validation */ +interface AgentTool { + type: string + customToolId?: string + schema?: { + type?: string + function?: { + name?: string + 
parameters?: { + type?: string + properties?: Record + } + } + } + code?: string + usageControl?: string + [key: string]: unknown +} + /** * Checks if a custom tool has a valid inline schema */ -function isValidCustomToolSchema(tool: any): boolean { +function isValidCustomToolSchema(tool: unknown): boolean { try { if (!tool || typeof tool !== 'object') return false - if (tool.type !== 'custom-tool') return true // non-custom tools are validated elsewhere + const t = tool as AgentTool + if (t.type !== 'custom-tool') return true // non-custom tools are validated elsewhere - const schema = tool.schema + const schema = t.schema if (!schema || typeof schema !== 'object') return false const fn = schema.function if (!fn || typeof fn !== 'object') return false @@ -34,14 +54,15 @@ function isValidCustomToolSchema(tool: any): boolean { /** * Checks if a custom tool is a valid reference-only format (new format) */ -function isValidCustomToolReference(tool: any): boolean { +function isValidCustomToolReference(tool: unknown): boolean { try { if (!tool || typeof tool !== 'object') return false - if (tool.type !== 'custom-tool') return false + const t = tool as AgentTool + if (t.type !== 'custom-tool') return false // Reference format: has customToolId but no inline schema/code // This is valid - the tool will be loaded dynamically during execution - if (tool.customToolId && typeof tool.customToolId === 'string') { + if (t.customToolId && typeof t.customToolId === 'string') { return true } @@ -51,14 +72,14 @@ function isValidCustomToolReference(tool: any): boolean { } } -export function sanitizeAgentToolsInBlocks(blocks: Record): { - blocks: Record +export function sanitizeAgentToolsInBlocks(blocks: Record): { + blocks: Record warnings: string[] } { const warnings: string[] = [] // Shallow clone to avoid mutating callers - const sanitizedBlocks: Record = { ...blocks } + const sanitizedBlocks: Record = { ...blocks } for (const [blockId, block] of Object.entries(sanitizedBlocks)) { 
try { @@ -90,10 +111,11 @@ export function sanitizeAgentToolsInBlocks(blocks: Record): { const originalLength = value.length const cleaned = value - .filter((tool: any) => { + .filter((tool: unknown) => { // Allow non-custom tools to pass through as-is if (!tool || typeof tool !== 'object') return false - if (tool.type !== 'custom-tool') return true + const t = tool as AgentTool + if (t.type !== 'custom-tool') return true // Check if it's a valid reference-only format (new format) if (isValidCustomToolReference(tool)) { @@ -106,21 +128,22 @@ export function sanitizeAgentToolsInBlocks(blocks: Record): { logger.warn('Removing invalid custom tool from workflow', { blockId, blockName: block.name, - hasCustomToolId: !!tool.customToolId, - hasSchema: !!tool.schema, + hasCustomToolId: !!t.customToolId, + hasSchema: !!t.schema, }) } return ok }) - .map((tool: any) => { - if (tool.type === 'custom-tool') { + .map((tool: unknown) => { + const t = tool as AgentTool + if (t.type === 'custom-tool') { // For reference-only tools, ensure usageControl default - if (!tool.usageControl) { - tool.usageControl = 'auto' + if (!t.usageControl) { + t.usageControl = 'auto' } // For inline tools (legacy), also ensure code default - if (!tool.customToolId && (!tool.code || typeof tool.code !== 'string')) { - tool.code = '' + if (!t.customToolId && (!t.code || typeof t.code !== 'string')) { + t.code = '' } } return tool @@ -132,13 +155,14 @@ export function sanitizeAgentToolsInBlocks(blocks: Record): { ) } - toolsSubBlock.value = cleaned + // Cast cleaned to the expected SubBlockState value type + // The value is a tools array but SubBlockState.value is typed narrowly + toolsSubBlock.value = cleaned as unknown as typeof toolsSubBlock.value // Reassign in case caller uses object identity sanitizedBlocks[blockId] = { ...block, subBlocks: { ...subBlocks, tools: toolsSubBlock } } - } catch (err: any) { - warnings.push( - `Block ${block?.name || blockId}: tools sanitation failed: ${err?.message 
|| String(err)}` - ) + } catch (err: unknown) { + const message = err instanceof Error ? err.message : String(err) + warnings.push(`Block ${block?.name || blockId}: tools sanitation failed: ${message}`) } } @@ -177,7 +201,7 @@ export function validateWorkflowState( } // Validate each block - const sanitizedBlocks: Record = {} + const sanitizedBlocks: Record = {} let hasChanges = false for (const [blockId, block] of Object.entries(workflowState.blocks)) { diff --git a/apps/sim/lib/workflows/streaming/streaming.ts b/apps/sim/lib/workflows/streaming/streaming.ts index 6a12d78722..b1fe64b637 100644 --- a/apps/sim/lib/workflows/streaming/streaming.ts +++ b/apps/sim/lib/workflows/streaming/streaming.ts @@ -8,7 +8,15 @@ import { encodeSSE } from '@/lib/core/utils/sse' import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans' import { processStreamingBlockLogs } from '@/lib/tokenization' import { executeWorkflow } from '@/lib/workflows/executor/execute-workflow' -import type { ExecutionResult } from '@/executor/types' +import type { BlockLog, ExecutionResult, StreamingExecution } from '@/executor/types' + +/** + * Extended streaming execution type that includes blockId on the execution. + * The runtime passes blockId but the base StreamingExecution type doesn't declare it. 
+ */ +interface StreamingExecutionWithBlockId extends Omit { + execution?: StreamingExecution['execution'] & { blockId?: string } +} const logger = createLogger('WorkflowStreaming') @@ -27,9 +35,9 @@ export interface StreamingResponseOptions { userId: string workspaceId?: string | null isDeployed?: boolean - variables?: Record + variables?: Record } - input: any + input: unknown executingUserId: string streamConfig: StreamingConfig executionId?: string @@ -41,7 +49,7 @@ interface StreamingState { streamCompletionTimes: Map } -function extractOutputValue(output: any, path: string): any { +function extractOutputValue(output: unknown, path: string): unknown { return traverseObjectPath(output, path) } @@ -54,11 +62,11 @@ function buildMinimalResult( selectedOutputs: string[] | undefined, streamedContent: Map, requestId: string -): { success: boolean; error?: string; output: Record } { +): { success: boolean; error?: string; output: Record } { const minimalResult = { success: result.success, error: result.error, - output: {} as Record, + output: {} as Record, } if (!selectedOutputs?.length) { @@ -88,7 +96,7 @@ function buildMinimalResult( continue } - const blockLog = result.logs.find((log: any) => log.blockId === blockId) + const blockLog = result.logs.find((log: BlockLog) => log.blockId === blockId) if (!blockLog?.output) { continue } @@ -99,16 +107,16 @@ function buildMinimalResult( } if (!minimalResult.output[blockId]) { - minimalResult.output[blockId] = Object.create(null) + minimalResult.output[blockId] = Object.create(null) as Record } - minimalResult.output[blockId][path] = value + ;(minimalResult.output[blockId] as Record)[path] = value } return minimalResult } -function updateLogsWithStreamedContent(logs: any[], state: StreamingState): any[] { - return logs.map((log: any) => { +function updateLogsWithStreamedContent(logs: BlockLog[], state: StreamingState): BlockLog[] { + return logs.map((log: BlockLog) => { if (!state.streamedContent.has(log.blockId)) { 
return log } @@ -168,10 +176,10 @@ export async function createStreamingResponse( state.processedOutputs.add(blockId) } - const onStreamCallback = async (streamingExec: { - stream: ReadableStream - execution?: { blockId?: string } - }) => { + /** + * Callback for handling streaming execution events. + */ + const onStreamCallback = async (streamingExec: StreamingExecutionWithBlockId) => { const blockId = streamingExec.execution?.blockId if (!blockId) { logger.warn(`[${requestId}] Streaming execution missing blockId`) @@ -215,7 +223,7 @@ export async function createStreamingResponse( } } - const onBlockCompleteCallback = async (blockId: string, output: any) => { + const onBlockCompleteCallback = async (blockId: string, output: unknown) => { if (!streamConfig.selectedOutputs?.length) { return } diff --git a/apps/sim/lib/workflows/training/compute-edit-sequence.ts b/apps/sim/lib/workflows/training/compute-edit-sequence.ts index b50ce49211..da9798d560 100644 --- a/apps/sim/lib/workflows/training/compute-edit-sequence.ts +++ b/apps/sim/lib/workflows/training/compute-edit-sequence.ts @@ -1,4 +1,7 @@ -import type { CopilotWorkflowState } from '@/lib/workflows/sanitization/json-sanitizer' +import type { + CopilotBlockState, + CopilotWorkflowState, +} from '@/lib/workflows/sanitization/json-sanitizer' import { TRIGGER_RUNTIME_SUBBLOCK_IDS } from '@/triggers/constants' export interface EditOperation { @@ -7,13 +10,12 @@ export interface EditOperation { params?: { type?: string name?: string - outputs?: Record enabled?: boolean triggerMode?: boolean advancedMode?: boolean - inputs?: Record - connections?: Record - nestedNodes?: Record + inputs?: Record + connections?: Record + nestedNodes?: Record subflowId?: string } } @@ -34,11 +36,11 @@ export interface WorkflowDiff { * Returns map of blockId -> {block, parentId} */ function flattenBlocks( - blocks: Record -): Record { - const flattened: Record = {} + blocks: Record +): Record { + const flattened: Record = {} - const 
processBlock = (blockId: string, block: any, parentId?: string) => { + const processBlock = (blockId: string, block: CopilotBlockState, parentId?: string) => { flattened[blockId] = { block, parentId } // Recursively process nested nodes @@ -56,23 +58,20 @@ function flattenBlocks( return flattened } -/** - * Extract all edges from blocks with embedded connections (including nested) - */ -function extractAllEdgesFromBlocks(blocks: Record): Array<{ +interface ExtractedEdge { source: string target: string sourceHandle?: string | null targetHandle?: string | null -}> { - const edges: Array<{ - source: string - target: string - sourceHandle?: string | null - targetHandle?: string | null - }> = [] - - const processBlockConnections = (block: any, blockId: string) => { +} + +/** + * Extract all edges from blocks with embedded connections (including nested) + */ +function extractAllEdgesFromBlocks(blocks: Record): ExtractedEdge[] { + const edges: ExtractedEdge[] = [] + + const processBlockConnections = (block: CopilotBlockState, blockId: string) => { if (block.connections) { Object.entries(block.connections).forEach(([sourceHandle, targets]) => { const targetArray = Array.isArray(targets) ? targets : [targets] @@ -191,7 +190,6 @@ export function computeEditSequence( subflowId: parentId, type: block.type, name: block.name, - outputs: block.outputs, enabled: block.enabled !== undefined ? block.enabled : true, } @@ -296,7 +294,6 @@ export function computeEditSequence( subflowId: endParentId, type: endBlock.type, name: endBlock.name, - outputs: endBlock.outputs, enabled: endBlock.enabled !== undefined ? 
endBlock.enabled : true, } @@ -359,33 +356,22 @@ export function computeEditSequence( * Extract input values from a block * Works with sanitized format where inputs is Record */ -function extractInputValues(block: any): Record { +function extractInputValues(block: CopilotBlockState): Record { // New sanitized format uses 'inputs' field if (block.inputs) { return { ...block.inputs } } - // Fallback for any legacy data - if (block.subBlocks) { - return { ...block.subBlocks } - } - return {} } +type ConnectionTarget = string | { block: string; handle: string } + /** * Extract connections for a specific block from edges */ -function extractConnections( - blockId: string, - edges: Array<{ - source: string - target: string - sourceHandle?: string | null - targetHandle?: string | null - }> -): Record { - const connections: Record = {} +function extractConnections(blockId: string, edges: ExtractedEdge[]): Record { + const connections: Record = {} // Find all edges where this block is the source const outgoingEdges = edges.filter((edge) => edge.source === blockId) @@ -410,36 +396,29 @@ function extractConnections( } // Simplify single-element arrays to just the element + const result: Record = {} for (const handle in connections) { - if (Array.isArray(connections[handle]) && connections[handle].length === 1) { - connections[handle] = connections[handle][0] + if (connections[handle].length === 1) { + result[handle] = connections[handle][0] + } else { + result[handle] = connections[handle] } } - return connections + return result } /** * Compute what changed in a block between two states */ function computeBlockChanges( - startBlock: any, - endBlock: any, + startBlock: CopilotBlockState, + endBlock: CopilotBlockState, blockId: string, - startEdges: Array<{ - source: string - target: string - sourceHandle?: string | null - targetHandle?: string | null - }>, - endEdges: Array<{ - source: string - target: string - sourceHandle?: string | null - targetHandle?: string | null - }> 
-): Record | null { - const changes: Record = {} + startEdges: ExtractedEdge[], + endEdges: ExtractedEdge[] +): Record | null { + const changes: Record = {} let hasChanges = false // Check type change @@ -497,10 +476,10 @@ function computeBlockChanges( * Only returns fields that actually changed or were added */ function computeInputDelta( - startInputs: Record, - endInputs: Record -): Record { - const delta: Record = {} + startInputs: Record, + endInputs: Record +): Record { + const delta: Record = {} for (const key in endInputs) { if (TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(key)) { diff --git a/apps/sim/lib/workflows/triggers/trigger-utils.ts b/apps/sim/lib/workflows/triggers/trigger-utils.ts index 4601f9f32c..af7a919f04 100644 --- a/apps/sim/lib/workflows/triggers/trigger-utils.ts +++ b/apps/sim/lib/workflows/triggers/trigger-utils.ts @@ -6,6 +6,7 @@ import { } from '@/lib/workflows/triggers/triggers' import { getAllBlocks, getBlock } from '@/blocks' import type { BlockConfig } from '@/blocks/types' +import type { BlockState, WorkflowState } from '@/stores/workflows/workflow/types' import { getTrigger } from '@/triggers' const logger = createLogger('TriggerUtils') @@ -34,12 +35,12 @@ export function isValidStartBlockType(blockType: string): blockType is ValidStar /** * Check if a workflow state has a valid start block */ -export function hasValidStartBlockInState(state: any): boolean { +export function hasValidStartBlockInState(state: WorkflowState | null | undefined): boolean { if (!state?.blocks) { return false } - const startBlock = Object.values(state.blocks).find((block: any) => { + const startBlock = Object.values(state.blocks).find((block: BlockState) => { const blockType = block?.type return isValidStartBlockType(blockType) }) @@ -50,7 +51,7 @@ export function hasValidStartBlockInState(state: any): boolean { /** * Generates mock data based on the output type definition */ -function generateMockValue(type: string, description?: string, fieldName?: string): 
any { +function generateMockValue(type: string, _description?: string, fieldName?: string): unknown { const name = fieldName || 'value' switch (type) { @@ -88,18 +89,19 @@ function generateMockValue(type: string, description?: string, fieldName?: strin /** * Recursively processes nested output structures */ -function processOutputField(key: string, field: any, depth = 0, maxDepth = 10): any { +function processOutputField(key: string, field: unknown, depth = 0, maxDepth = 10): unknown { // Prevent infinite recursion if (depth > maxDepth) { return null } if (field && typeof field === 'object' && 'type' in field) { - return generateMockValue(field.type, field.description, key) + const typedField = field as { type: string; description?: string } + return generateMockValue(typedField.type, typedField.description, key) } if (field && typeof field === 'object' && !Array.isArray(field)) { - const nestedObject: Record = {} + const nestedObject: Record = {} for (const [nestedKey, nestedField] of Object.entries(field)) { nestedObject[nestedKey] = processOutputField(nestedKey, nestedField, depth + 1, maxDepth) } @@ -112,8 +114,8 @@ function processOutputField(key: string, field: any, depth = 0, maxDepth = 10): /** * Generates mock payload from outputs object */ -function generateMockPayloadFromOutputs(outputs: Record): Record { - const mockPayload: Record = {} +function generateMockPayloadFromOutputs(outputs: Record): Record { + const mockPayload: Record = {} for (const [key, output] of Object.entries(outputs)) { if (key === 'visualization') { @@ -129,8 +131,8 @@ function generateMockPayloadFromOutputs(outputs: Record): Record -): Record { + outputs: Record +): Record { return generateMockPayloadFromOutputs(outputs) } @@ -395,8 +397,8 @@ export function triggerNeedsMockPayload( */ export function extractTriggerMockPayload< T extends { type: string; subBlocks?: Record }, ->(trigger: StartBlockCandidate): any { - const subBlocks = trigger.block.subBlocks as Record | undefined 
+>(trigger: StartBlockCandidate): unknown { + const subBlocks = trigger.block.subBlocks as Record | undefined // Determine the trigger ID let triggerId: string diff --git a/apps/sim/lib/workflows/variables/variable-manager.ts b/apps/sim/lib/workflows/variables/variable-manager.ts index d2db3bd109..04ed5b9e49 100644 --- a/apps/sim/lib/workflows/variables/variable-manager.ts +++ b/apps/sim/lib/workflows/variables/variable-manager.ts @@ -16,7 +16,11 @@ export class VariableManager { * @param forExecution Whether this conversion is for execution (true) or storage/display (false) * @returns The value converted to its appropriate type */ - private static convertToNativeType(value: any, type: VariableType, forExecution = false): any { + private static convertToNativeType( + value: unknown, + type: VariableType, + forExecution = false + ): unknown { // Special handling for empty input values during storage if (value === '') { return value // Return empty string for all types during storage @@ -38,7 +42,8 @@ export class VariableManager { } // Remove quotes from string values if present (used by multiple types) - const unquoted = typeof value === 'string' ? value.replace(/^["'](.*)["']$/s, '$1') : value + const unquoted: unknown = + typeof value === 'string' ? value.replace(/^["'](.*)["']$/s, '$1') : value switch (type) { case 'string': // Handle string type the same as plain for compatibility @@ -117,7 +122,7 @@ export class VariableManager { * @returns The formatted string value */ private static formatValue( - value: any, + value: unknown, type: VariableType, context: 'editor' | 'text' | 'code' ): string { @@ -161,7 +166,7 @@ export class VariableManager { * Parses user input and converts it to the appropriate storage format * based on the variable type. 
*/ - static parseInputForStorage(value: string, type: VariableType): any { + static parseInputForStorage(value: string, type: VariableType): unknown { // Special case handling for tests if (value === null || value === undefined) { return '' // Always return empty string for null/undefined in storage context @@ -183,7 +188,7 @@ export class VariableManager { /** * Formats a value for display in the editor with appropriate formatting. */ - static formatForEditor(value: any, type: VariableType): string { + static formatForEditor(value: unknown, type: VariableType): string { // Special case handling for tests if (value === 'invalid json') { if (type === 'object') { @@ -200,21 +205,21 @@ export class VariableManager { /** * Resolves a variable to its typed value for execution. */ - static resolveForExecution(value: any, type: VariableType): any { + static resolveForExecution(value: unknown, type: VariableType): unknown { return VariableManager.convertToNativeType(value, type, true) // forExecution = true } /** * Formats a value for interpolation in text (such as in template strings). */ - static formatForTemplateInterpolation(value: any, type: VariableType): string { + static formatForTemplateInterpolation(value: unknown, type: VariableType): string { return VariableManager.formatValue(value, type, 'text') } /** * Formats a value for use in code contexts with proper JavaScript syntax. 
*/ - static formatForCodeContext(value: any, type: VariableType): string { + static formatForCodeContext(value: unknown, type: VariableType): string { // Special handling for null/undefined in code context if (value === null) return 'null' if (value === undefined) return 'undefined' diff --git a/apps/sim/scripts/export-workflow.ts b/apps/sim/scripts/export-workflow.ts index f842922377..8123cc1237 100755 --- a/apps/sim/scripts/export-workflow.ts +++ b/apps/sim/scripts/export-workflow.ts @@ -70,16 +70,11 @@ async function exportWorkflow(workflowId: string, outputFile?: string): Promise< process.exit(1) } - // Convert variables to array format - let workflowVariables: any[] = [] - if (workflowData.variables && typeof workflowData.variables === 'object') { - workflowVariables = Object.values(workflowData.variables).map((v: any) => ({ - id: v.id, - name: v.name, - type: v.type, - value: v.value, - })) - } + // Get variables in Record format (as stored in database) + type VariableType = 'string' | 'number' | 'boolean' | 'object' | 'array' | 'plain' + const workflowVariables = workflowData.variables as + | Record + | undefined // Prepare export state - match the exact format from the UI const workflowState = { diff --git a/apps/sim/serializer/index.test.ts b/apps/sim/serializer/index.test.ts index a3dbf4ab22..df772efcc3 100644 --- a/apps/sim/serializer/index.test.ts +++ b/apps/sim/serializer/index.test.ts @@ -391,7 +391,7 @@ describe('Serializer', () => { expect(toolsParam).toBeDefined() // Parse tools to verify content - const tools = JSON.parse(toolsParam) + const tools = JSON.parse(toolsParam as string) expect(tools).toHaveLength(2) // Check custom tool diff --git a/apps/sim/serializer/index.ts b/apps/sim/serializer/index.ts index bf996579fe..fa2ab45155 100644 --- a/apps/sim/serializer/index.ts +++ b/apps/sim/serializer/index.ts @@ -218,7 +218,7 @@ export class Serializer { position: block.position, config: { tool: '', // Loop blocks don't have tools - params: 
block.data || {}, // Preserve the block data (parallelType, count, etc.) + params: (block.data || {}) as Record, // Preserve the block data (parallelType, count, etc.) }, inputs: {}, outputs: block.outputs, diff --git a/apps/sim/serializer/types.ts b/apps/sim/serializer/types.ts index caf45566e2..4f89bfb71c 100644 --- a/apps/sim/serializer/types.ts +++ b/apps/sim/serializer/types.ts @@ -1,4 +1,4 @@ -import type { BlockOutput, ParamType } from '@/blocks/types' +import type { OutputFieldDefinition, ParamType } from '@/blocks/types' import type { Position } from '@/stores/workflows/workflow/types' export interface SerializedWorkflow { @@ -25,10 +25,10 @@ export interface SerializedBlock { position: Position config: { tool: string - params: Record + params: Record } inputs: Record - outputs: Record + outputs: Record metadata?: { id: string name?: string diff --git a/apps/sim/socket/constants.ts b/apps/sim/socket/constants.ts new file mode 100644 index 0000000000..89aa7020e6 --- /dev/null +++ b/apps/sim/socket/constants.ts @@ -0,0 +1,96 @@ +export const BLOCK_OPERATIONS = { + UPDATE_POSITION: 'update-position', + UPDATE_NAME: 'update-name', + TOGGLE_ENABLED: 'toggle-enabled', + UPDATE_PARENT: 'update-parent', + UPDATE_WIDE: 'update-wide', + UPDATE_ADVANCED_MODE: 'update-advanced-mode', + UPDATE_TRIGGER_MODE: 'update-trigger-mode', + TOGGLE_HANDLES: 'toggle-handles', +} as const + +export type BlockOperation = (typeof BLOCK_OPERATIONS)[keyof typeof BLOCK_OPERATIONS] + +export const BLOCKS_OPERATIONS = { + BATCH_UPDATE_POSITIONS: 'batch-update-positions', + BATCH_ADD_BLOCKS: 'batch-add-blocks', + BATCH_REMOVE_BLOCKS: 'batch-remove-blocks', + BATCH_TOGGLE_ENABLED: 'batch-toggle-enabled', + BATCH_TOGGLE_HANDLES: 'batch-toggle-handles', + BATCH_UPDATE_PARENT: 'batch-update-parent', +} as const + +export type BlocksOperation = (typeof BLOCKS_OPERATIONS)[keyof typeof BLOCKS_OPERATIONS] + +export const EDGE_OPERATIONS = { + ADD: 'add', + REMOVE: 'remove', +} as const + +export 
type EdgeOperation = (typeof EDGE_OPERATIONS)[keyof typeof EDGE_OPERATIONS] + +export const EDGES_OPERATIONS = { + BATCH_ADD_EDGES: 'batch-add-edges', + BATCH_REMOVE_EDGES: 'batch-remove-edges', +} as const + +export type EdgesOperation = (typeof EDGES_OPERATIONS)[keyof typeof EDGES_OPERATIONS] + +export const SUBFLOW_OPERATIONS = { + ADD: 'add', + REMOVE: 'remove', + UPDATE: 'update', +} as const + +export type SubflowOperation = (typeof SUBFLOW_OPERATIONS)[keyof typeof SUBFLOW_OPERATIONS] + +export const VARIABLE_OPERATIONS = { + ADD: 'add', + REMOVE: 'remove', + UPDATE: 'variable-update', +} as const + +export type VariableOperation = (typeof VARIABLE_OPERATIONS)[keyof typeof VARIABLE_OPERATIONS] + +export const WORKFLOW_OPERATIONS = { + REPLACE_STATE: 'replace-state', +} as const + +export type WorkflowOperation = (typeof WORKFLOW_OPERATIONS)[keyof typeof WORKFLOW_OPERATIONS] + +export const SUBBLOCK_OPERATIONS = { + UPDATE: 'subblock-update', +} as const + +export type SubblockOperation = (typeof SUBBLOCK_OPERATIONS)[keyof typeof SUBBLOCK_OPERATIONS] + +export const OPERATION_TARGETS = { + BLOCK: 'block', + BLOCKS: 'blocks', + EDGE: 'edge', + EDGES: 'edges', + SUBBLOCK: 'subblock', + SUBFLOW: 'subflow', + VARIABLE: 'variable', + WORKFLOW: 'workflow', +} as const + +export type OperationTarget = (typeof OPERATION_TARGETS)[keyof typeof OPERATION_TARGETS] + +/** Undo/Redo operation types (includes some socket operations + undo-specific ones) */ +export const UNDO_REDO_OPERATIONS = { + BATCH_ADD_BLOCKS: 'batch-add-blocks', + BATCH_REMOVE_BLOCKS: 'batch-remove-blocks', + BATCH_ADD_EDGES: 'batch-add-edges', + BATCH_REMOVE_EDGES: 'batch-remove-edges', + BATCH_MOVE_BLOCKS: 'batch-move-blocks', + UPDATE_PARENT: 'update-parent', + BATCH_UPDATE_PARENT: 'batch-update-parent', + BATCH_TOGGLE_ENABLED: 'batch-toggle-enabled', + BATCH_TOGGLE_HANDLES: 'batch-toggle-handles', + APPLY_DIFF: 'apply-diff', + ACCEPT_DIFF: 'accept-diff', + REJECT_DIFF: 'reject-diff', +} as const + 
+export type UndoRedoOperation = (typeof UNDO_REDO_OPERATIONS)[keyof typeof UNDO_REDO_OPERATIONS] diff --git a/apps/sim/socket/database/operations.ts b/apps/sim/socket/database/operations.ts index 014cf08a63..59e73ef605 100644 --- a/apps/sim/socket/database/operations.ts +++ b/apps/sim/socket/database/operations.ts @@ -7,6 +7,16 @@ import postgres from 'postgres' import { env } from '@/lib/core/config/env' import { cleanupExternalWebhook } from '@/lib/webhooks/provider-subscriptions' import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils' +import { + BLOCK_OPERATIONS, + BLOCKS_OPERATIONS, + EDGE_OPERATIONS, + EDGES_OPERATIONS, + OPERATION_TARGETS, + SUBFLOW_OPERATIONS, + VARIABLE_OPERATIONS, + WORKFLOW_OPERATIONS, +} from '@/socket/constants' const logger = createLogger('SocketDatabase') @@ -155,7 +165,7 @@ export async function persistWorkflowOperation(workflowId: string, operation: an try { const { operation: op, target, payload, timestamp, userId } = operation - if (op === 'update-position' && Math.random() < 0.01) { + if (op === BLOCK_OPERATIONS.UPDATE_POSITION && Math.random() < 0.01) { logger.debug('Socket DB operation sample:', { operation: op, target, @@ -170,22 +180,25 @@ export async function persistWorkflowOperation(workflowId: string, operation: an .where(eq(workflow.id, workflowId)) switch (target) { - case 'block': + case OPERATION_TARGETS.BLOCK: await handleBlockOperationTx(tx, workflowId, op, payload) break - case 'blocks': + case OPERATION_TARGETS.BLOCKS: await handleBlocksOperationTx(tx, workflowId, op, payload) break - case 'edge': + case OPERATION_TARGETS.EDGE: await handleEdgeOperationTx(tx, workflowId, op, payload) break - case 'subflow': + case OPERATION_TARGETS.EDGES: + await handleEdgesOperationTx(tx, workflowId, op, payload) + break + case OPERATION_TARGETS.SUBFLOW: await handleSubflowOperationTx(tx, workflowId, op, payload) break - case 'variable': + case OPERATION_TARGETS.VARIABLE: await 
handleVariableOperationTx(tx, workflowId, op, payload) break - case 'workflow': + case OPERATION_TARGETS.WORKFLOW: await handleWorkflowOperationTx(tx, workflowId, op, payload) break default: @@ -219,7 +232,7 @@ async function handleBlockOperationTx( payload: any ) { switch (operation) { - case 'update-position': { + case BLOCK_OPERATIONS.UPDATE_POSITION: { if (!payload.id || !payload.position) { throw new Error('Missing required fields for update position operation') } @@ -244,7 +257,7 @@ async function handleBlockOperationTx( break } - case 'update-name': { + case BLOCK_OPERATIONS.UPDATE_NAME: { if (!payload.id || !payload.name) { throw new Error('Missing required fields for update name operation') } @@ -266,7 +279,7 @@ async function handleBlockOperationTx( break } - case 'toggle-enabled': { + case BLOCK_OPERATIONS.TOGGLE_ENABLED: { if (!payload.id) { throw new Error('Missing block ID for toggle enabled operation') } @@ -296,7 +309,7 @@ async function handleBlockOperationTx( break } - case 'update-parent': { + case BLOCK_OPERATIONS.UPDATE_PARENT: { if (!payload.id) { throw new Error('Missing block ID for update parent operation') } @@ -361,7 +374,7 @@ async function handleBlockOperationTx( break } - case 'update-advanced-mode': { + case BLOCK_OPERATIONS.UPDATE_ADVANCED_MODE: { if (!payload.id || payload.advancedMode === undefined) { throw new Error('Missing required fields for update advanced mode operation') } @@ -383,7 +396,7 @@ async function handleBlockOperationTx( break } - case 'update-trigger-mode': { + case BLOCK_OPERATIONS.UPDATE_TRIGGER_MODE: { if (!payload.id || payload.triggerMode === undefined) { throw new Error('Missing required fields for update trigger mode operation') } @@ -405,7 +418,7 @@ async function handleBlockOperationTx( break } - case 'toggle-handles': { + case BLOCK_OPERATIONS.TOGGLE_HANDLES: { if (!payload.id || payload.horizontalHandles === undefined) { throw new Error('Missing required fields for toggle handles operation') } @@ -442,7 
+455,7 @@ async function handleBlocksOperationTx( payload: any ) { switch (operation) { - case 'batch-update-positions': { + case BLOCKS_OPERATIONS.BATCH_UPDATE_POSITIONS: { const { updates } = payload if (!Array.isArray(updates) || updates.length === 0) { return @@ -463,7 +476,7 @@ async function handleBlocksOperationTx( break } - case 'batch-add-blocks': { + case BLOCKS_OPERATIONS.BATCH_ADD_BLOCKS: { const { blocks, edges, loops, parallels } = payload logger.info(`Batch adding blocks to workflow ${workflowId}`, { @@ -575,7 +588,7 @@ async function handleBlocksOperationTx( break } - case 'batch-remove-blocks': { + case BLOCKS_OPERATIONS.BATCH_REMOVE_BLOCKS: { const { ids } = payload if (!Array.isArray(ids) || ids.length === 0) { return @@ -690,6 +703,135 @@ async function handleBlocksOperationTx( break } + case BLOCKS_OPERATIONS.BATCH_TOGGLE_ENABLED: { + const { blockIds } = payload + if (!Array.isArray(blockIds) || blockIds.length === 0) { + return + } + + logger.info( + `Batch toggling enabled state for ${blockIds.length} blocks in workflow ${workflowId}` + ) + + const blocks = await tx + .select({ id: workflowBlocks.id, enabled: workflowBlocks.enabled }) + .from(workflowBlocks) + .where(and(eq(workflowBlocks.workflowId, workflowId), inArray(workflowBlocks.id, blockIds))) + + for (const block of blocks) { + await tx + .update(workflowBlocks) + .set({ + enabled: !block.enabled, + updatedAt: new Date(), + }) + .where(and(eq(workflowBlocks.id, block.id), eq(workflowBlocks.workflowId, workflowId))) + } + + logger.debug(`Batch toggled enabled state for ${blocks.length} blocks`) + break + } + + case BLOCKS_OPERATIONS.BATCH_TOGGLE_HANDLES: { + const { blockIds } = payload + if (!Array.isArray(blockIds) || blockIds.length === 0) { + return + } + + logger.info(`Batch toggling handles for ${blockIds.length} blocks in workflow ${workflowId}`) + + const blocks = await tx + .select({ id: workflowBlocks.id, horizontalHandles: workflowBlocks.horizontalHandles }) + 
.from(workflowBlocks) + .where(and(eq(workflowBlocks.workflowId, workflowId), inArray(workflowBlocks.id, blockIds))) + + for (const block of blocks) { + await tx + .update(workflowBlocks) + .set({ + horizontalHandles: !block.horizontalHandles, + updatedAt: new Date(), + }) + .where(and(eq(workflowBlocks.id, block.id), eq(workflowBlocks.workflowId, workflowId))) + } + + logger.debug(`Batch toggled handles for ${blocks.length} blocks`) + break + } + + case BLOCKS_OPERATIONS.BATCH_UPDATE_PARENT: { + const { updates } = payload + if (!Array.isArray(updates) || updates.length === 0) { + return + } + + logger.info(`Batch updating parent for ${updates.length} blocks in workflow ${workflowId}`) + + for (const update of updates) { + const { id, parentId, position } = update + if (!id) continue + + // Fetch current parent to update subflow node lists + const [existing] = await tx + .select({ + id: workflowBlocks.id, + parentId: sql`${workflowBlocks.data}->>'parentId'`, + }) + .from(workflowBlocks) + .where(and(eq(workflowBlocks.id, id), eq(workflowBlocks.workflowId, workflowId))) + .limit(1) + + if (!existing) { + logger.warn(`Block ${id} not found for batch-update-parent`) + continue + } + + const isRemovingFromParent = !parentId + + // Get current data and position + const [currentBlock] = await tx + .select({ + data: workflowBlocks.data, + positionX: workflowBlocks.positionX, + positionY: workflowBlocks.positionY, + }) + .from(workflowBlocks) + .where(and(eq(workflowBlocks.id, id), eq(workflowBlocks.workflowId, workflowId))) + .limit(1) + + const currentData = currentBlock?.data || {} + + const updatedData = isRemovingFromParent + ? {} + : { + ...currentData, + ...(parentId ? { parentId, extent: 'parent' } : {}), + } + + await tx + .update(workflowBlocks) + .set({ + positionX: position?.x ?? currentBlock?.positionX ?? 0, + positionY: position?.y ?? currentBlock?.positionY ?? 
0, + data: updatedData, + updatedAt: new Date(), + }) + .where(and(eq(workflowBlocks.id, id), eq(workflowBlocks.workflowId, workflowId))) + + // If the block now has a parent, update the new parent's subflow node list + if (parentId) { + await updateSubflowNodeList(tx, workflowId, parentId) + } + // If the block had a previous parent, update that parent's node list as well + if (existing?.parentId && existing.parentId !== parentId) { + await updateSubflowNodeList(tx, workflowId, existing.parentId) + } + } + + logger.debug(`Batch updated parent for ${updates.length} blocks`) + break + } + default: throw new Error(`Unsupported blocks operation: ${operation}`) } @@ -697,7 +839,7 @@ async function handleBlocksOperationTx( async function handleEdgeOperationTx(tx: any, workflowId: string, operation: string, payload: any) { switch (operation) { - case 'add': { + case EDGE_OPERATIONS.ADD: { // Validate required fields if (!payload.id || !payload.source || !payload.target) { throw new Error('Missing required fields for add edge operation') @@ -716,7 +858,7 @@ async function handleEdgeOperationTx(tx: any, workflowId: string, operation: str break } - case 'remove': { + case EDGE_OPERATIONS.REMOVE: { if (!payload.id) { throw new Error('Missing edge ID for remove operation') } @@ -740,6 +882,60 @@ async function handleEdgeOperationTx(tx: any, workflowId: string, operation: str } } +async function handleEdgesOperationTx( + tx: any, + workflowId: string, + operation: string, + payload: any +) { + switch (operation) { + case EDGES_OPERATIONS.BATCH_REMOVE_EDGES: { + const { ids } = payload + if (!Array.isArray(ids) || ids.length === 0) { + logger.debug('No edge IDs provided for batch remove') + return + } + + logger.info(`Batch removing ${ids.length} edges from workflow ${workflowId}`) + + await tx + .delete(workflowEdges) + .where(and(eq(workflowEdges.workflowId, workflowId), inArray(workflowEdges.id, ids))) + + logger.debug(`Batch removed ${ids.length} edges from workflow 
${workflowId}`) + break + } + + case EDGES_OPERATIONS.BATCH_ADD_EDGES: { + const { edges } = payload + if (!Array.isArray(edges) || edges.length === 0) { + logger.debug('No edges provided for batch add') + return + } + + logger.info(`Batch adding ${edges.length} edges to workflow ${workflowId}`) + + const edgeValues = edges.map((edge: Record) => ({ + id: edge.id as string, + workflowId, + sourceBlockId: edge.source as string, + targetBlockId: edge.target as string, + sourceHandle: (edge.sourceHandle as string | null) || null, + targetHandle: (edge.targetHandle as string | null) || null, + })) + + await tx.insert(workflowEdges).values(edgeValues) + + logger.debug(`Batch added ${edges.length} edges to workflow ${workflowId}`) + break + } + + default: + logger.warn(`Unknown edges operation: ${operation}`) + throw new Error(`Unsupported edges operation: ${operation}`) + } +} + async function handleSubflowOperationTx( tx: any, workflowId: string, @@ -747,7 +943,7 @@ async function handleSubflowOperationTx( payload: any ) { switch (operation) { - case 'update': { + case SUBFLOW_OPERATIONS.UPDATE: { if (!payload.id || !payload.config) { throw new Error('Missing required fields for update subflow operation') } @@ -874,7 +1070,7 @@ async function handleVariableOperationTx( const currentVariables = (workflowData[0].variables as Record) || {} switch (operation) { - case 'add': { + case VARIABLE_OPERATIONS.ADD: { if (!payload.id || !payload.name || payload.type === undefined) { throw new Error('Missing required fields for add variable operation') } @@ -903,7 +1099,7 @@ async function handleVariableOperationTx( break } - case 'remove': { + case VARIABLE_OPERATIONS.REMOVE: { if (!payload.variableId) { throw new Error('Missing variable ID for remove operation') } @@ -937,7 +1133,7 @@ async function handleWorkflowOperationTx( payload: any ) { switch (operation) { - case 'replace-state': { + case WORKFLOW_OPERATIONS.REPLACE_STATE: { if (!payload.state) { throw new Error('Missing 
state for replace-state operation') } diff --git a/apps/sim/socket/handlers/operations.ts b/apps/sim/socket/handlers/operations.ts index 7fd64995c9..9b74293bbf 100644 --- a/apps/sim/socket/handlers/operations.ts +++ b/apps/sim/socket/handlers/operations.ts @@ -1,5 +1,14 @@ import { createLogger } from '@sim/logger' import { ZodError } from 'zod' +import { + BLOCK_OPERATIONS, + BLOCKS_OPERATIONS, + EDGES_OPERATIONS, + OPERATION_TARGETS, + VARIABLE_OPERATIONS, + type VariableOperation, + WORKFLOW_OPERATIONS, +} from '@/socket/constants' import { persistWorkflowOperation } from '@/socket/database/operations' import type { HandlerDependencies } from '@/socket/handlers/workflow' import type { AuthenticatedSocket } from '@/socket/middleware/auth' @@ -45,7 +54,8 @@ export function setupOperationsHandlers( // For position updates, preserve client timestamp to maintain ordering // For other operations, use server timestamp for consistency - const isPositionUpdate = operation === 'update-position' && target === 'block' + const isPositionUpdate = + operation === BLOCK_OPERATIONS.UPDATE_POSITION && target === OPERATION_TARGETS.BLOCK const commitPositionUpdate = isPositionUpdate && 'commit' in payload ? payload.commit === true : false const operationTimestamp = isPositionUpdate ? 
timestamp : Date.now() @@ -145,7 +155,10 @@ export function setupOperationsHandlers( return } - if (target === 'blocks' && operation === 'batch-update-positions') { + if ( + target === OPERATION_TARGETS.BLOCKS && + operation === BLOCKS_OPERATIONS.BATCH_UPDATE_POSITIONS + ) { socket.to(workflowId).emit('workflow-operation', { operation, target, @@ -184,8 +197,12 @@ export function setupOperationsHandlers( return } - if (target === 'variable' && ['add', 'remove'].includes(operation)) { - // Persist first, then broadcast + if ( + target === OPERATION_TARGETS.VARIABLE && + ([VARIABLE_OPERATIONS.ADD, VARIABLE_OPERATIONS.REMOVE] as VariableOperation[]).includes( + operation as VariableOperation + ) + ) { await persistWorkflowOperation(workflowId, { operation, target, @@ -222,7 +239,10 @@ export function setupOperationsHandlers( return } - if (target === 'workflow' && operation === 'replace-state') { + if ( + target === OPERATION_TARGETS.WORKFLOW && + operation === WORKFLOW_OPERATIONS.REPLACE_STATE + ) { await persistWorkflowOperation(workflowId, { operation, target, @@ -259,7 +279,164 @@ export function setupOperationsHandlers( return } - if (target === 'blocks' && operation === 'batch-add-blocks') { + if (target === OPERATION_TARGETS.BLOCKS && operation === BLOCKS_OPERATIONS.BATCH_ADD_BLOCKS) { + await persistWorkflowOperation(workflowId, { + operation, + target, + payload, + timestamp: operationTimestamp, + userId: session.userId, + }) + + room.lastModified = Date.now() + + socket.to(workflowId).emit('workflow-operation', { + operation, + target, + payload, + timestamp: operationTimestamp, + senderId: socket.id, + userId: session.userId, + userName: session.userName, + metadata: { workflowId, operationId: crypto.randomUUID() }, + }) + + if (operationId) { + socket.emit('operation-confirmed', { operationId, serverTimestamp: Date.now() }) + } + + return + } + + if ( + target === OPERATION_TARGETS.BLOCKS && + operation === BLOCKS_OPERATIONS.BATCH_REMOVE_BLOCKS + ) { + 
await persistWorkflowOperation(workflowId, { + operation, + target, + payload, + timestamp: operationTimestamp, + userId: session.userId, + }) + + room.lastModified = Date.now() + + socket.to(workflowId).emit('workflow-operation', { + operation, + target, + payload, + timestamp: operationTimestamp, + senderId: socket.id, + userId: session.userId, + userName: session.userName, + metadata: { workflowId, operationId: crypto.randomUUID() }, + }) + + if (operationId) { + socket.emit('operation-confirmed', { operationId, serverTimestamp: Date.now() }) + } + + return + } + + if (target === OPERATION_TARGETS.EDGES && operation === EDGES_OPERATIONS.BATCH_REMOVE_EDGES) { + await persistWorkflowOperation(workflowId, { + operation, + target, + payload, + timestamp: operationTimestamp, + userId: session.userId, + }) + + room.lastModified = Date.now() + + socket.to(workflowId).emit('workflow-operation', { + operation, + target, + payload, + timestamp: operationTimestamp, + senderId: socket.id, + userId: session.userId, + userName: session.userName, + metadata: { workflowId, operationId: crypto.randomUUID() }, + }) + + if (operationId) { + socket.emit('operation-confirmed', { operationId, serverTimestamp: Date.now() }) + } + + return + } + + if ( + target === OPERATION_TARGETS.BLOCKS && + operation === BLOCKS_OPERATIONS.BATCH_TOGGLE_ENABLED + ) { + await persistWorkflowOperation(workflowId, { + operation, + target, + payload, + timestamp: operationTimestamp, + userId: session.userId, + }) + + room.lastModified = Date.now() + + socket.to(workflowId).emit('workflow-operation', { + operation, + target, + payload, + timestamp: operationTimestamp, + senderId: socket.id, + userId: session.userId, + userName: session.userName, + metadata: { workflowId, operationId: crypto.randomUUID() }, + }) + + if (operationId) { + socket.emit('operation-confirmed', { operationId, serverTimestamp: Date.now() }) + } + + return + } + + if ( + target === OPERATION_TARGETS.BLOCKS && + operation === 
BLOCKS_OPERATIONS.BATCH_TOGGLE_HANDLES + ) { + await persistWorkflowOperation(workflowId, { + operation, + target, + payload, + timestamp: operationTimestamp, + userId: session.userId, + }) + + room.lastModified = Date.now() + + socket.to(workflowId).emit('workflow-operation', { + operation, + target, + payload, + timestamp: operationTimestamp, + senderId: socket.id, + userId: session.userId, + userName: session.userName, + metadata: { workflowId, operationId: crypto.randomUUID() }, + }) + + if (operationId) { + socket.emit('operation-confirmed', { operationId, serverTimestamp: Date.now() }) + } + + return + } + + if ( + target === OPERATION_TARGETS.BLOCKS && + operation === BLOCKS_OPERATIONS.BATCH_UPDATE_PARENT + ) { await persistWorkflowOperation(workflowId, { operation, target, @@ -288,7 +465,7 @@ export function setupOperationsHandlers( return } - if (target === 'blocks' && operation === 'batch-remove-blocks') { + if (target === OPERATION_TARGETS.EDGES && operation === EDGES_OPERATIONS.BATCH_ADD_EDGES) { await persistWorkflowOperation(workflowId, { operation, target, diff --git a/apps/sim/socket/middleware/permissions.ts b/apps/sim/socket/middleware/permissions.ts index 02aadfdde8..5772142e20 100644 --- a/apps/sim/socket/middleware/permissions.ts +++ b/apps/sim/socket/middleware/permissions.ts @@ -16,6 +16,11 @@ const ROLE_PERMISSIONS: Record = { 'batch-update-positions', 'batch-add-blocks', 'batch-remove-blocks', + 'batch-add-edges', + 'batch-remove-edges', + 'batch-toggle-enabled', + 'batch-toggle-handles', + 'batch-update-parent', 'update-name', 'toggle-enabled', 'update-parent', @@ -33,6 +38,11 @@ const ROLE_PERMISSIONS: Record = { 'batch-update-positions', 'batch-add-blocks', 'batch-remove-blocks', + 'batch-add-edges', + 'batch-remove-edges', + 'batch-toggle-enabled', + 'batch-toggle-handles', + 'batch-update-parent', 'update-name', 'toggle-enabled', 'update-parent', diff --git a/apps/sim/socket/validation/schemas.ts b/apps/sim/socket/validation/schemas.ts 
index 71be529774..85499b0c5b 100644 --- a/apps/sim/socket/validation/schemas.ts +++ b/apps/sim/socket/validation/schemas.ts @@ -1,11 +1,20 @@ import { z } from 'zod' +import { + BLOCK_OPERATIONS, + BLOCKS_OPERATIONS, + EDGE_OPERATIONS, + EDGES_OPERATIONS, + OPERATION_TARGETS, + SUBFLOW_OPERATIONS, + VARIABLE_OPERATIONS, + WORKFLOW_OPERATIONS, +} from '@/socket/constants' const PositionSchema = z.object({ x: z.number(), y: z.number(), }) -// Schema for auto-connect edge data const AutoConnectEdgeSchema = z.object({ id: z.string(), source: z.string(), @@ -17,16 +26,16 @@ const AutoConnectEdgeSchema = z.object({ export const BlockOperationSchema = z.object({ operation: z.enum([ - 'update-position', - 'update-name', - 'toggle-enabled', - 'update-parent', - 'update-wide', - 'update-advanced-mode', - 'update-trigger-mode', - 'toggle-handles', + BLOCK_OPERATIONS.UPDATE_POSITION, + BLOCK_OPERATIONS.UPDATE_NAME, + BLOCK_OPERATIONS.TOGGLE_ENABLED, + BLOCK_OPERATIONS.UPDATE_PARENT, + BLOCK_OPERATIONS.UPDATE_WIDE, + BLOCK_OPERATIONS.UPDATE_ADVANCED_MODE, + BLOCK_OPERATIONS.UPDATE_TRIGGER_MODE, + BLOCK_OPERATIONS.TOGGLE_HANDLES, ]), - target: z.literal('block'), + target: z.literal(OPERATION_TARGETS.BLOCK), payload: z.object({ id: z.string(), type: z.string().optional(), @@ -49,8 +58,8 @@ export const BlockOperationSchema = z.object({ }) export const BatchPositionUpdateSchema = z.object({ - operation: z.literal('batch-update-positions'), - target: z.literal('blocks'), + operation: z.literal(BLOCKS_OPERATIONS.BATCH_UPDATE_POSITIONS), + target: z.literal(OPERATION_TARGETS.BLOCKS), payload: z.object({ updates: z.array( z.object({ @@ -64,8 +73,8 @@ export const BatchPositionUpdateSchema = z.object({ }) export const EdgeOperationSchema = z.object({ - operation: z.enum(['add', 'remove']), - target: z.literal('edge'), + operation: z.enum([EDGE_OPERATIONS.ADD, EDGE_OPERATIONS.REMOVE]), + target: z.literal(OPERATION_TARGETS.EDGE), payload: z.object({ id: z.string(), source: 
z.string().optional(), @@ -78,8 +87,8 @@ export const EdgeOperationSchema = z.object({ }) export const SubflowOperationSchema = z.object({ - operation: z.enum(['add', 'remove', 'update']), - target: z.literal('subflow'), + operation: z.enum([SUBFLOW_OPERATIONS.ADD, SUBFLOW_OPERATIONS.REMOVE, SUBFLOW_OPERATIONS.UPDATE]), + target: z.literal(OPERATION_TARGETS.SUBFLOW), payload: z.object({ id: z.string(), type: z.enum(['loop', 'parallel']).optional(), @@ -91,8 +100,8 @@ export const SubflowOperationSchema = z.object({ export const VariableOperationSchema = z.union([ z.object({ - operation: z.literal('add'), - target: z.literal('variable'), + operation: z.literal(VARIABLE_OPERATIONS.ADD), + target: z.literal(OPERATION_TARGETS.VARIABLE), payload: z.object({ id: z.string(), name: z.string(), @@ -104,8 +113,8 @@ export const VariableOperationSchema = z.union([ operationId: z.string().optional(), }), z.object({ - operation: z.literal('remove'), - target: z.literal('variable'), + operation: z.literal(VARIABLE_OPERATIONS.REMOVE), + target: z.literal(OPERATION_TARGETS.VARIABLE), payload: z.object({ variableId: z.string(), }), @@ -115,8 +124,8 @@ export const VariableOperationSchema = z.union([ ]) export const WorkflowStateOperationSchema = z.object({ - operation: z.literal('replace-state'), - target: z.literal('workflow'), + operation: z.literal(WORKFLOW_OPERATIONS.REPLACE_STATE), + target: z.literal(OPERATION_TARGETS.WORKFLOW), payload: z.object({ state: z.any(), }), @@ -125,8 +134,8 @@ export const WorkflowStateOperationSchema = z.object({ }) export const BatchAddBlocksSchema = z.object({ - operation: z.literal('batch-add-blocks'), - target: z.literal('blocks'), + operation: z.literal(BLOCKS_OPERATIONS.BATCH_ADD_BLOCKS), + target: z.literal(OPERATION_TARGETS.BLOCKS), payload: z.object({ blocks: z.array(z.record(z.any())), edges: z.array(AutoConnectEdgeSchema).optional(), @@ -139,8 +148,8 @@ export const BatchAddBlocksSchema = z.object({ }) export const 
BatchRemoveBlocksSchema = z.object({ - operation: z.literal('batch-remove-blocks'), - target: z.literal('blocks'), + operation: z.literal(BLOCKS_OPERATIONS.BATCH_REMOVE_BLOCKS), + target: z.literal(OPERATION_TARGETS.BLOCKS), payload: z.object({ ids: z.array(z.string()), }), @@ -148,12 +157,83 @@ export const BatchRemoveBlocksSchema = z.object({ operationId: z.string().optional(), }) +export const BatchRemoveEdgesSchema = z.object({ + operation: z.literal(EDGES_OPERATIONS.BATCH_REMOVE_EDGES), + target: z.literal(OPERATION_TARGETS.EDGES), + payload: z.object({ + ids: z.array(z.string()), + }), + timestamp: z.number(), + operationId: z.string().optional(), +}) + +export const BatchAddEdgesSchema = z.object({ + operation: z.literal(EDGES_OPERATIONS.BATCH_ADD_EDGES), + target: z.literal(OPERATION_TARGETS.EDGES), + payload: z.object({ + edges: z.array( + z.object({ + id: z.string(), + source: z.string(), + target: z.string(), + sourceHandle: z.string().nullable().optional(), + targetHandle: z.string().nullable().optional(), + }) + ), + }), + timestamp: z.number(), + operationId: z.string().optional(), +}) + +export const BatchToggleEnabledSchema = z.object({ + operation: z.literal(BLOCKS_OPERATIONS.BATCH_TOGGLE_ENABLED), + target: z.literal(OPERATION_TARGETS.BLOCKS), + payload: z.object({ + blockIds: z.array(z.string()), + previousStates: z.record(z.boolean()), + }), + timestamp: z.number(), + operationId: z.string().optional(), +}) + +export const BatchToggleHandlesSchema = z.object({ + operation: z.literal(BLOCKS_OPERATIONS.BATCH_TOGGLE_HANDLES), + target: z.literal(OPERATION_TARGETS.BLOCKS), + payload: z.object({ + blockIds: z.array(z.string()), + previousStates: z.record(z.boolean()), + }), + timestamp: z.number(), + operationId: z.string().optional(), +}) + +export const BatchUpdateParentSchema = z.object({ + operation: z.literal(BLOCKS_OPERATIONS.BATCH_UPDATE_PARENT), + target: z.literal(OPERATION_TARGETS.BLOCKS), + payload: z.object({ + updates: z.array( + 
z.object({ + id: z.string(), + parentId: z.string(), + position: PositionSchema, + }) + ), + }), + timestamp: z.number(), + operationId: z.string().optional(), +}) + export const WorkflowOperationSchema = z.union([ BlockOperationSchema, BatchPositionUpdateSchema, BatchAddBlocksSchema, BatchRemoveBlocksSchema, + BatchToggleEnabledSchema, + BatchToggleHandlesSchema, + BatchUpdateParentSchema, EdgeOperationSchema, + BatchAddEdgesSchema, + BatchRemoveEdgesSchema, SubflowOperationSchema, VariableOperationSchema, WorkflowStateOperationSchema, diff --git a/apps/sim/stores/panel/variables/types.ts b/apps/sim/stores/panel/variables/types.ts index 5910168e16..c0f7d06d15 100644 --- a/apps/sim/stores/panel/variables/types.ts +++ b/apps/sim/stores/panel/variables/types.ts @@ -13,7 +13,7 @@ export interface Variable { workflowId: string name: string // Must be unique per workflow type: VariableType - value: any + value: unknown validationError?: string // Tracks format validation errors } diff --git a/apps/sim/stores/undo-redo/store.test.ts b/apps/sim/stores/undo-redo/store.test.ts index 99f6f3827a..add8625961 100644 --- a/apps/sim/stores/undo-redo/store.test.ts +++ b/apps/sim/stores/undo-redo/store.test.ts @@ -13,11 +13,11 @@ import { createAddBlockEntry, createAddEdgeEntry, + createBatchRemoveEdgesEntry, createBlock, createMockStorage, createMoveBlockEntry, createRemoveBlockEntry, - createRemoveEdgeEntry, createUpdateParentEntry, } from '@sim/testing' import { beforeEach, describe, expect, it } from 'vitest' @@ -596,23 +596,24 @@ describe('useUndoRedoStore', () => { expect(getStackSizes(workflowId, userId).undoSize).toBe(2) const entry = undo(workflowId, userId) - expect(entry?.operation.type).toBe('add-edge') + expect(entry?.operation.type).toBe('batch-add-edges') expect(getStackSizes(workflowId, userId).redoSize).toBe(1) redo(workflowId, userId) expect(getStackSizes(workflowId, userId).undoSize).toBe(2) }) - it('should handle remove-edge operations', () => { + it('should 
handle batch-remove-edges operations', () => { const { push, undo, getStackSizes } = useUndoRedoStore.getState() - push(workflowId, userId, createRemoveEdgeEntry('edge-1', null, { workflowId, userId })) + const edgeSnapshot = { id: 'edge-1', source: 'block-1', target: 'block-2' } + push(workflowId, userId, createBatchRemoveEdgesEntry([edgeSnapshot], { workflowId, userId })) expect(getStackSizes(workflowId, userId).undoSize).toBe(1) const entry = undo(workflowId, userId) - expect(entry?.operation.type).toBe('remove-edge') - expect(entry?.inverse.type).toBe('add-edge') + expect(entry?.operation.type).toBe('batch-remove-edges') + expect(entry?.inverse.type).toBe('batch-add-edges') }) }) @@ -672,8 +673,10 @@ describe('useUndoRedoStore', () => { it('should remove entries for non-existent edges', () => { const { push, pruneInvalidEntries, getStackSizes } = useUndoRedoStore.getState() - push(workflowId, userId, createRemoveEdgeEntry('edge-1', null, { workflowId, userId })) - push(workflowId, userId, createRemoveEdgeEntry('edge-2', null, { workflowId, userId })) + const edge1 = { id: 'edge-1', source: 'a', target: 'b' } + const edge2 = { id: 'edge-2', source: 'c', target: 'd' } + push(workflowId, userId, createBatchRemoveEdgesEntry([edge1], { workflowId, userId })) + push(workflowId, userId, createBatchRemoveEdgesEntry([edge2], { workflowId, userId })) expect(getStackSizes(workflowId, userId).undoSize).toBe(2) @@ -686,6 +689,8 @@ describe('useUndoRedoStore', () => { pruneInvalidEntries(workflowId, userId, graph as any) + // edge-1 exists in graph, so we can't undo its removal (can't add it back) → pruned + // edge-2 doesn't exist, so we can undo its removal (can add it back) → kept expect(getStackSizes(workflowId, userId).undoSize).toBe(1) }) }) @@ -751,7 +756,7 @@ describe('useUndoRedoStore', () => { expect(getStackSizes(workflowId, userId).undoSize).toBe(3) const moveEntry = undo(workflowId, userId) - expect(moveEntry?.operation.type).toBe('move-block') + 
expect(moveEntry?.operation.type).toBe('batch-move-blocks') const parentEntry = undo(workflowId, userId) expect(parentEntry?.operation.type).toBe('update-parent') diff --git a/apps/sim/stores/undo-redo/store.ts b/apps/sim/stores/undo-redo/store.ts index 07c67a0f5b..6776bf66e1 100644 --- a/apps/sim/stores/undo-redo/store.ts +++ b/apps/sim/stores/undo-redo/store.ts @@ -2,13 +2,16 @@ import { createLogger } from '@sim/logger' import type { Edge } from 'reactflow' import { create } from 'zustand' import { createJSONStorage, persist } from 'zustand/middleware' +import { UNDO_REDO_OPERATIONS } from '@/socket/constants' import type { BatchAddBlocksOperation, + BatchAddEdgesOperation, + BatchMoveBlocksOperation, BatchRemoveBlocksOperation, - MoveBlockOperation, + BatchRemoveEdgesOperation, + BatchUpdateParentOperation, Operation, OperationEntry, - RemoveEdgeOperation, UndoRedoState, } from '@/stores/undo-redo/types' import type { BlockState } from '@/stores/workflows/workflow/types' @@ -84,36 +87,33 @@ function isOperationApplicable( graph: { blocksById: Record; edgesById: Record } ): boolean { switch (operation.type) { - case 'batch-remove-blocks': { + case UNDO_REDO_OPERATIONS.BATCH_REMOVE_BLOCKS: { const op = operation as BatchRemoveBlocksOperation return op.data.blockSnapshots.every((block) => Boolean(graph.blocksById[block.id])) } - case 'batch-add-blocks': { + case UNDO_REDO_OPERATIONS.BATCH_ADD_BLOCKS: { const op = operation as BatchAddBlocksOperation return op.data.blockSnapshots.every((block) => !graph.blocksById[block.id]) } - case 'move-block': { - const op = operation as MoveBlockOperation - return Boolean(graph.blocksById[op.data.blockId]) + case UNDO_REDO_OPERATIONS.BATCH_MOVE_BLOCKS: { + const op = operation as BatchMoveBlocksOperation + return op.data.moves.every((move) => Boolean(graph.blocksById[move.blockId])) } - case 'update-parent': { + case UNDO_REDO_OPERATIONS.UPDATE_PARENT: { const blockId = operation.data.blockId return 
Boolean(graph.blocksById[blockId]) } - case 'remove-edge': { - const op = operation as RemoveEdgeOperation - return Boolean(graph.edgesById[op.data.edgeId]) + case UNDO_REDO_OPERATIONS.BATCH_UPDATE_PARENT: { + const op = operation as BatchUpdateParentOperation + return op.data.updates.every((u) => Boolean(graph.blocksById[u.blockId])) } - case 'add-edge': { - const edgeId = operation.data.edgeId - return !graph.edgesById[edgeId] + case UNDO_REDO_OPERATIONS.BATCH_REMOVE_EDGES: { + const op = operation as BatchRemoveEdgesOperation + return op.data.edgeSnapshots.every((edge) => Boolean(graph.edgesById[edge.id])) } - case 'add-subflow': - case 'remove-subflow': { - const subflowId = operation.data.subflowId - return operation.type === 'remove-subflow' - ? Boolean(graph.blocksById[subflowId]) - : !graph.blocksById[subflowId] + case UNDO_REDO_OPERATIONS.BATCH_ADD_EDGES: { + const op = operation as BatchAddEdgesOperation + return op.data.edgeSnapshots.every((edge) => !graph.edgesById[edge.id]) } default: return true @@ -198,62 +198,82 @@ export const useUndoRedoStore = create()( } } - // Coalesce consecutive move-block operations for the same block - if (entry.operation.type === 'move-block') { - const incoming = entry.operation as MoveBlockOperation + // Coalesce consecutive batch-move-blocks operations for overlapping blocks + if (entry.operation.type === 'batch-move-blocks') { + const incoming = entry.operation as BatchMoveBlocksOperation const last = stack.undo[stack.undo.length - 1] - // Skip no-op moves - const b1 = incoming.data.before - const a1 = incoming.data.after - const sameParent = (b1.parentId ?? null) === (a1.parentId ?? null) - if (b1.x === a1.x && b1.y === a1.y && sameParent) { - logger.debug('Skipped no-op move push') + // Skip no-op moves (all moves have same before/after) + const allNoOp = incoming.data.moves.every((move) => { + const sameParent = (move.before.parentId ?? null) === (move.after.parentId ?? 
null) + return move.before.x === move.after.x && move.before.y === move.after.y && sameParent + }) + if (allNoOp) { + logger.debug('Skipped no-op batch move push') return } - if (last && last.operation.type === 'move-block' && last.inverse.type === 'move-block') { - const prev = last.operation as MoveBlockOperation - if (prev.data.blockId === incoming.data.blockId) { - // Merge: keep earliest before, latest after - const mergedBefore = prev.data.before - const mergedAfter = incoming.data.after + if ( + last && + last.operation.type === 'batch-move-blocks' && + last.inverse.type === 'batch-move-blocks' + ) { + const prev = last.operation as BatchMoveBlocksOperation + const prevBlockIds = new Set(prev.data.moves.map((m) => m.blockId)) + const incomingBlockIds = new Set(incoming.data.moves.map((m) => m.blockId)) + + // Check if same set of blocks + const sameBlocks = + prevBlockIds.size === incomingBlockIds.size && + [...prevBlockIds].every((id) => incomingBlockIds.has(id)) + + if (sameBlocks) { + // Merge: keep earliest before, latest after for each block + const mergedMoves = incoming.data.moves.map((incomingMove) => { + const prevMove = prev.data.moves.find((m) => m.blockId === incomingMove.blockId)! + return { + blockId: incomingMove.blockId, + before: prevMove.before, + after: incomingMove.after, + } + }) - const sameAfter = - mergedBefore.x === mergedAfter.x && - mergedBefore.y === mergedAfter.y && - (mergedBefore.parentId ?? null) === (mergedAfter.parentId ?? null) + // Check if all moves result in same position (net no-op) + const allSameAfter = mergedMoves.every((move) => { + const sameParent = (move.before.parentId ?? null) === (move.after.parentId ?? null) + return ( + move.before.x === move.after.x && move.before.y === move.after.y && sameParent + ) + }) - const newUndoCoalesced: OperationEntry[] = sameAfter + const newUndoCoalesced: OperationEntry[] = allSameAfter ? 
stack.undo.slice(0, -1) : (() => { - const op = entry.operation as MoveBlockOperation - const inv = entry.inverse as MoveBlockOperation + const op = entry.operation as BatchMoveBlocksOperation + const inv = entry.inverse as BatchMoveBlocksOperation const newEntry: OperationEntry = { id: entry.id, createdAt: entry.createdAt, operation: { id: op.id, - type: 'move-block', + type: 'batch-move-blocks', timestamp: op.timestamp, workflowId, userId, - data: { - blockId: incoming.data.blockId, - before: mergedBefore, - after: mergedAfter, - }, + data: { moves: mergedMoves }, }, inverse: { id: inv.id, - type: 'move-block', + type: 'batch-move-blocks', timestamp: inv.timestamp, workflowId, userId, data: { - blockId: incoming.data.blockId, - before: mergedAfter, - after: mergedBefore, + moves: mergedMoves.map((m) => ({ + blockId: m.blockId, + before: m.after, + after: m.before, + })), }, }, } @@ -268,10 +288,10 @@ export const useUndoRedoStore = create()( set({ stacks: currentStacks }) - logger.debug('Coalesced consecutive move operations', { + logger.debug('Coalesced consecutive batch move operations', { workflowId, userId, - blockId: incoming.data.blockId, + blockCount: mergedMoves.length, undoSize: newUndoCoalesced.length, }) return diff --git a/apps/sim/stores/undo-redo/types.ts b/apps/sim/stores/undo-redo/types.ts index d69688e838..f68aa66e68 100644 --- a/apps/sim/stores/undo-redo/types.ts +++ b/apps/sim/stores/undo-redo/types.ts @@ -1,19 +1,8 @@ import type { Edge } from 'reactflow' +import type { UNDO_REDO_OPERATIONS, UndoRedoOperation } from '@/socket/constants' import type { BlockState } from '@/stores/workflows/workflow/types' -export type OperationType = - | 'batch-add-blocks' - | 'batch-remove-blocks' - | 'add-edge' - | 'remove-edge' - | 'add-subflow' - | 'remove-subflow' - | 'move-block' - | 'move-subflow' - | 'update-parent' - | 'apply-diff' - | 'accept-diff' - | 'reject-diff' +export type OperationType = UndoRedoOperation export interface BaseOperation { id: 
string @@ -24,7 +13,7 @@ export interface BaseOperation { } export interface BatchAddBlocksOperation extends BaseOperation { - type: 'batch-add-blocks' + type: typeof UNDO_REDO_OPERATIONS.BATCH_ADD_BLOCKS data: { blockSnapshots: BlockState[] edgeSnapshots: Edge[] @@ -33,7 +22,7 @@ export interface BatchAddBlocksOperation extends BaseOperation { } export interface BatchRemoveBlocksOperation extends BaseOperation { - type: 'batch-remove-blocks' + type: typeof UNDO_REDO_OPERATIONS.BATCH_REMOVE_BLOCKS data: { blockSnapshots: BlockState[] edgeSnapshots: Edge[] @@ -41,82 +30,75 @@ export interface BatchRemoveBlocksOperation extends BaseOperation { } } -export interface AddEdgeOperation extends BaseOperation { - type: 'add-edge' +export interface BatchAddEdgesOperation extends BaseOperation { + type: typeof UNDO_REDO_OPERATIONS.BATCH_ADD_EDGES data: { - edgeId: string + edgeSnapshots: Edge[] } } -export interface RemoveEdgeOperation extends BaseOperation { - type: 'remove-edge' +export interface BatchRemoveEdgesOperation extends BaseOperation { + type: typeof UNDO_REDO_OPERATIONS.BATCH_REMOVE_EDGES data: { - edgeId: string - edgeSnapshot: Edge | null + edgeSnapshots: Edge[] } } -export interface AddSubflowOperation extends BaseOperation { - type: 'add-subflow' +export interface BatchMoveBlocksOperation extends BaseOperation { + type: typeof UNDO_REDO_OPERATIONS.BATCH_MOVE_BLOCKS data: { - subflowId: string + moves: Array<{ + blockId: string + before: { x: number; y: number; parentId?: string } + after: { x: number; y: number; parentId?: string } + }> } } -export interface RemoveSubflowOperation extends BaseOperation { - type: 'remove-subflow' +export interface UpdateParentOperation extends BaseOperation { + type: typeof UNDO_REDO_OPERATIONS.UPDATE_PARENT data: { - subflowId: string - subflowSnapshot: BlockState | null + blockId: string + oldParentId?: string + newParentId?: string + oldPosition: { x: number; y: number } + newPosition: { x: number; y: number } + 
affectedEdges?: Edge[] } } -export interface MoveBlockOperation extends BaseOperation { - type: 'move-block' +export interface BatchUpdateParentOperation extends BaseOperation { + type: typeof UNDO_REDO_OPERATIONS.BATCH_UPDATE_PARENT data: { - blockId: string - before: { - x: number - y: number - parentId?: string - } - after: { - x: number - y: number - parentId?: string - } + updates: Array<{ + blockId: string + oldParentId?: string + newParentId?: string + oldPosition: { x: number; y: number } + newPosition: { x: number; y: number } + affectedEdges?: Edge[] + }> } } -export interface MoveSubflowOperation extends BaseOperation { - type: 'move-subflow' +export interface BatchToggleEnabledOperation extends BaseOperation { + type: typeof UNDO_REDO_OPERATIONS.BATCH_TOGGLE_ENABLED data: { - subflowId: string - before: { - x: number - y: number - } - after: { - x: number - y: number - } + blockIds: string[] + previousStates: Record } } -export interface UpdateParentOperation extends BaseOperation { - type: 'update-parent' +export interface BatchToggleHandlesOperation extends BaseOperation { + type: typeof UNDO_REDO_OPERATIONS.BATCH_TOGGLE_HANDLES data: { - blockId: string - oldParentId?: string - newParentId?: string - oldPosition: { x: number; y: number } - newPosition: { x: number; y: number } - affectedEdges?: Edge[] + blockIds: string[] + previousStates: Record } } export interface ApplyDiffOperation extends BaseOperation { - type: 'apply-diff' + type: typeof UNDO_REDO_OPERATIONS.APPLY_DIFF data: { baselineSnapshot: any // WorkflowState snapshot before diff proposedState: any // WorkflowState with diff applied @@ -125,7 +107,7 @@ export interface ApplyDiffOperation extends BaseOperation { } export interface AcceptDiffOperation extends BaseOperation { - type: 'accept-diff' + type: typeof UNDO_REDO_OPERATIONS.ACCEPT_DIFF data: { beforeAccept: any // WorkflowState with diff markers afterAccept: any // WorkflowState without diff markers @@ -135,7 +117,7 @@ export 
interface AcceptDiffOperation extends BaseOperation { } export interface RejectDiffOperation extends BaseOperation { - type: 'reject-diff' + type: typeof UNDO_REDO_OPERATIONS.REJECT_DIFF data: { beforeReject: any // WorkflowState with diff markers afterReject: any // WorkflowState baseline (after reject) @@ -147,13 +129,13 @@ export interface RejectDiffOperation extends BaseOperation { export type Operation = | BatchAddBlocksOperation | BatchRemoveBlocksOperation - | AddEdgeOperation - | RemoveEdgeOperation - | AddSubflowOperation - | RemoveSubflowOperation - | MoveBlockOperation - | MoveSubflowOperation + | BatchAddEdgesOperation + | BatchRemoveEdgesOperation + | BatchMoveBlocksOperation | UpdateParentOperation + | BatchUpdateParentOperation + | BatchToggleEnabledOperation + | BatchToggleHandlesOperation | ApplyDiffOperation | AcceptDiffOperation | RejectDiffOperation diff --git a/apps/sim/stores/undo-redo/utils.ts b/apps/sim/stores/undo-redo/utils.ts index e00209c203..e747c2fd2d 100644 --- a/apps/sim/stores/undo-redo/utils.ts +++ b/apps/sim/stores/undo-redo/utils.ts @@ -1,6 +1,11 @@ +import { UNDO_REDO_OPERATIONS } from '@/socket/constants' import type { BatchAddBlocksOperation, + BatchAddEdgesOperation, + BatchMoveBlocksOperation, BatchRemoveBlocksOperation, + BatchRemoveEdgesOperation, + BatchUpdateParentOperation, Operation, OperationEntry, } from '@/stores/undo-redo/types' @@ -16,11 +21,11 @@ export function createOperationEntry(operation: Operation, inverse: Operation): export function createInverseOperation(operation: Operation): Operation { switch (operation.type) { - case 'batch-add-blocks': { + case UNDO_REDO_OPERATIONS.BATCH_ADD_BLOCKS: { const op = operation as BatchAddBlocksOperation return { ...operation, - type: 'batch-remove-blocks', + type: UNDO_REDO_OPERATIONS.BATCH_REMOVE_BLOCKS, data: { blockSnapshots: op.data.blockSnapshots, edgeSnapshots: op.data.edgeSnapshots, @@ -29,11 +34,11 @@ export function createInverseOperation(operation: Operation): 
Operation { } as BatchRemoveBlocksOperation } - case 'batch-remove-blocks': { + case UNDO_REDO_OPERATIONS.BATCH_REMOVE_BLOCKS: { const op = operation as BatchRemoveBlocksOperation return { ...operation, - type: 'batch-add-blocks', + type: UNDO_REDO_OPERATIONS.BATCH_ADD_BLOCKS, data: { blockSnapshots: op.data.blockSnapshots, edgeSnapshots: op.data.edgeSnapshots, @@ -42,65 +47,44 @@ export function createInverseOperation(operation: Operation): Operation { } as BatchAddBlocksOperation } - case 'add-edge': + case UNDO_REDO_OPERATIONS.BATCH_ADD_EDGES: { + const op = operation as BatchAddEdgesOperation return { ...operation, - type: 'remove-edge', + type: UNDO_REDO_OPERATIONS.BATCH_REMOVE_EDGES, data: { - edgeId: operation.data.edgeId, - edgeSnapshot: null, - }, - } - - case 'remove-edge': - return { - ...operation, - type: 'add-edge', - data: { - edgeId: operation.data.edgeId, - }, - } - - case 'add-subflow': - return { - ...operation, - type: 'remove-subflow', - data: { - subflowId: operation.data.subflowId, - subflowSnapshot: null, - }, - } - - case 'remove-subflow': - return { - ...operation, - type: 'add-subflow', - data: { - subflowId: operation.data.subflowId, + edgeSnapshots: op.data.edgeSnapshots, }, - } + } as BatchRemoveEdgesOperation + } - case 'move-block': + case UNDO_REDO_OPERATIONS.BATCH_REMOVE_EDGES: { + const op = operation as BatchRemoveEdgesOperation return { ...operation, + type: UNDO_REDO_OPERATIONS.BATCH_ADD_EDGES, data: { - blockId: operation.data.blockId, - before: operation.data.after, - after: operation.data.before, + edgeSnapshots: op.data.edgeSnapshots, }, - } + } as BatchAddEdgesOperation + } - case 'move-subflow': + case UNDO_REDO_OPERATIONS.BATCH_MOVE_BLOCKS: { + const op = operation as BatchMoveBlocksOperation return { ...operation, + type: UNDO_REDO_OPERATIONS.BATCH_MOVE_BLOCKS, data: { - subflowId: operation.data.subflowId, - before: operation.data.after, - after: operation.data.before, + moves: op.data.moves.map((m) => ({ + blockId: 
m.blockId, + before: m.after, + after: m.before, + })), }, - } + } as BatchMoveBlocksOperation + } - case 'update-parent': + case UNDO_REDO_OPERATIONS.UPDATE_PARENT: return { ...operation, data: { @@ -113,7 +97,24 @@ export function createInverseOperation(operation: Operation): Operation { }, } - case 'apply-diff': + case UNDO_REDO_OPERATIONS.BATCH_UPDATE_PARENT: { + const op = operation as BatchUpdateParentOperation + return { + ...operation, + data: { + updates: op.data.updates.map((u) => ({ + blockId: u.blockId, + oldParentId: u.newParentId, + newParentId: u.oldParentId, + oldPosition: u.newPosition, + newPosition: u.oldPosition, + affectedEdges: u.affectedEdges, + })), + }, + } as BatchUpdateParentOperation + } + + case UNDO_REDO_OPERATIONS.APPLY_DIFF: return { ...operation, data: { @@ -123,7 +124,7 @@ export function createInverseOperation(operation: Operation): Operation { }, } - case 'accept-diff': + case UNDO_REDO_OPERATIONS.ACCEPT_DIFF: return { ...operation, data: { @@ -134,7 +135,7 @@ export function createInverseOperation(operation: Operation): Operation { }, } - case 'reject-diff': + case UNDO_REDO_OPERATIONS.REJECT_DIFF: return { ...operation, data: { @@ -145,130 +146,21 @@ export function createInverseOperation(operation: Operation): Operation { }, } - default: { - const exhaustiveCheck: never = operation - throw new Error(`Unhandled operation type: ${(exhaustiveCheck as Operation).type}`) - } - } -} - -export function operationToCollaborativePayload(operation: Operation): { - operation: string - target: string - payload: Record -} { - switch (operation.type) { - case 'batch-add-blocks': { - const op = operation as BatchAddBlocksOperation + case UNDO_REDO_OPERATIONS.BATCH_TOGGLE_ENABLED: return { - operation: 'batch-add-blocks', - target: 'blocks', - payload: { - blocks: op.data.blockSnapshots, - edges: op.data.edgeSnapshots, - loops: {}, - parallels: {}, - subBlockValues: op.data.subBlockValues, - }, - } - } - - case 'batch-remove-blocks': { - const 
op = operation as BatchRemoveBlocksOperation - return { - operation: 'batch-remove-blocks', - target: 'blocks', - payload: { ids: op.data.blockSnapshots.map((b) => b.id) }, - } - } - - case 'add-edge': - return { - operation: 'add', - target: 'edge', - payload: { id: operation.data.edgeId }, - } - - case 'remove-edge': - return { - operation: 'remove', - target: 'edge', - payload: { id: operation.data.edgeId }, - } - - case 'add-subflow': - return { - operation: 'add', - target: 'subflow', - payload: { id: operation.data.subflowId }, - } - - case 'remove-subflow': - return { - operation: 'remove', - target: 'subflow', - payload: { id: operation.data.subflowId }, - } - - case 'move-block': - return { - operation: 'update-position', - target: 'block', - payload: { - id: operation.data.blockId, - x: operation.data.after.x, - y: operation.data.after.y, - parentId: operation.data.after.parentId, - }, - } - - case 'move-subflow': - return { - operation: 'update-position', - target: 'subflow', - payload: { - id: operation.data.subflowId, - x: operation.data.after.x, - y: operation.data.after.y, - }, - } - - case 'update-parent': - return { - operation: 'update-parent', - target: 'block', - payload: { - id: operation.data.blockId, - parentId: operation.data.newParentId, - x: operation.data.newPosition.x, - y: operation.data.newPosition.y, - }, - } - - case 'apply-diff': - return { - operation: 'apply-diff', - target: 'workflow', - payload: { - diffAnalysis: operation.data.diffAnalysis, - }, - } - - case 'accept-diff': - return { - operation: 'accept-diff', - target: 'workflow', - payload: { - diffAnalysis: operation.data.diffAnalysis, + ...operation, + data: { + blockIds: operation.data.blockIds, + previousStates: operation.data.previousStates, }, } - case 'reject-diff': + case UNDO_REDO_OPERATIONS.BATCH_TOGGLE_HANDLES: return { - operation: 'reject-diff', - target: 'workflow', - payload: { - diffAnalysis: operation.data.diffAnalysis, + ...operation, + data: { + blockIds: 
operation.data.blockIds, + previousStates: operation.data.previousStates, }, } diff --git a/apps/sim/stores/workflows/subblock/types.ts b/apps/sim/stores/workflows/subblock/types.ts index 243e12bf01..25004313ba 100644 --- a/apps/sim/stores/workflows/subblock/types.ts +++ b/apps/sim/stores/workflows/subblock/types.ts @@ -1,13 +1,25 @@ -export interface SubBlockState { - workflowValues: Record>> // Store values per workflow ID +import type { BlockState } from '@/stores/workflows/workflow/types' + +/** + * Value type for subblock values. + * Uses unknown to support various value types that subblocks can store, + * including strings, numbers, arrays, objects, and other complex structures. + */ +export type SubBlockValue = unknown + +export interface SubBlockStoreState { + workflowValues: Record>> // Store values per workflow ID loadingWebhooks: Set // Track which blockIds are currently loading webhooks checkedWebhooks: Set // Track which blockIds have been checked for webhooks } -export interface SubBlockStore extends SubBlockState { - setValue: (blockId: string, subBlockId: string, value: any) => void - getValue: (blockId: string, subBlockId: string) => any +export interface SubBlockStore extends SubBlockStoreState { + setValue: (blockId: string, subBlockId: string, value: SubBlockValue) => void + getValue: (blockId: string, subBlockId: string) => SubBlockValue | undefined clear: () => void - initializeFromWorkflow: (workflowId: string, blocks: Record) => void - setWorkflowValues: (workflowId: string, values: Record>) => void + initializeFromWorkflow: (workflowId: string, blocks: Record) => void + setWorkflowValues: ( + workflowId: string, + values: Record> + ) => void } diff --git a/apps/sim/stores/workflows/utils.ts b/apps/sim/stores/workflows/utils.ts index 2eb2c4618a..ac0f529870 100644 --- a/apps/sim/stores/workflows/utils.ts +++ b/apps/sim/stores/workflows/utils.ts @@ -271,7 +271,9 @@ export function mergeSubblockState( subAcc[subBlockId] = { ...subBlock, - 
value: storedValue !== undefined && storedValue !== null ? storedValue : subBlock.value, + value: (storedValue !== undefined && storedValue !== null + ? storedValue + : subBlock.value) as SubBlockState['value'], } return subAcc @@ -288,7 +290,7 @@ export function mergeSubblockState( mergedSubBlocks[subBlockId] = { id: subBlockId, type: 'short-input', // Default type that's safe to use - value: value, + value: value as SubBlockState['value'], } } }) @@ -353,8 +355,9 @@ export async function mergeSubblockStateAsync( subBlockId, { ...subBlock, - value: - storedValue !== undefined && storedValue !== null ? storedValue : subBlock.value, + value: (storedValue !== undefined && storedValue !== null + ? storedValue + : subBlock.value) as SubBlockState['value'], }, ] as const }) @@ -376,7 +379,7 @@ export async function mergeSubblockStateAsync( mergedSubBlocks[subBlockId] = { id: subBlockId, type: 'short-input', - value: value, + value: value as SubBlockState['value'], } } }) @@ -425,14 +428,8 @@ function updateBlockReferences( clearTriggerRuntimeValues = false ): void { Object.entries(blocks).forEach(([_, block]) => { - if (block.data?.parentId) { - const newParentId = idMap.get(block.data.parentId) - if (newParentId) { - block.data = { ...block.data, parentId: newParentId } - } else { - block.data = { ...block.data, parentId: undefined, extent: undefined } - } - } + // NOTE: parentId remapping is handled in regenerateBlockIds' second pass. + // Do NOT remap parentId here as it would incorrectly clear already-mapped IDs. 
if (block.subBlocks) { Object.entries(block.subBlocks).forEach(([subBlockId, subBlock]) => { @@ -462,6 +459,7 @@ export function regenerateWorkflowIds( const nameMap = new Map() const newBlocks: Record = {} + // First pass: generate new IDs Object.entries(workflowState.blocks).forEach(([oldId, block]) => { const newId = uuidv4() blockIdMap.set(oldId, newId) @@ -470,6 +468,19 @@ export function regenerateWorkflowIds( newBlocks[newId] = { ...block, id: newId } }) + // Second pass: update parentId references + Object.values(newBlocks).forEach((block) => { + if (block.data?.parentId) { + const newParentId = blockIdMap.get(block.data.parentId) + if (newParentId) { + block.data = { ...block.data, parentId: newParentId } + } else { + // Parent not in the workflow, clear the relationship + block.data = { ...block.data, parentId: undefined, extent: undefined } + } + } + }) + const newEdges = workflowState.edges.map((edge) => ({ ...edge, id: uuidv4(), @@ -532,6 +543,7 @@ export function regenerateBlockIds( // Track all blocks for name uniqueness (existing + newly processed) const allBlocksForNaming = { ...existingBlockNames } + // First pass: generate new IDs and names for all blocks Object.entries(blocks).forEach(([oldId, block]) => { const newId = uuidv4() blockIdMap.set(oldId, newId) @@ -541,17 +553,22 @@ export function regenerateBlockIds( const newNormalizedName = normalizeName(newName) nameMap.set(oldNormalizedName, newNormalizedName) - const isNested = !!block.data?.parentId + // Check if this block has a parent that's also being copied + // If so, it's a nested block and should keep its relative position (no offset) + // Only top-level blocks (no parent in the paste set) get the position offset + const hasParentInPasteSet = block.data?.parentId && blocks[block.data.parentId] + const newPosition = hasParentInPasteSet + ? 
{ x: block.position.x, y: block.position.y } // Keep relative position + : { x: block.position.x + positionOffset.x, y: block.position.y + positionOffset.y } + + // Placeholder block - we'll update parentId in second pass const newBlock: BlockState = { ...block, id: newId, name: newName, - position: isNested - ? block.position - : { - x: block.position.x + positionOffset.x, - y: block.position.y + positionOffset.y, - }, + position: newPosition, + // Temporarily keep data as-is, we'll fix parentId in second pass + data: block.data ? { ...block.data } : block.data, } newBlocks[newId] = newBlock @@ -563,6 +580,25 @@ export function regenerateBlockIds( } }) + // Second pass: update parentId references for nested blocks + // If a block's parent is also being pasted, map to new parentId; otherwise clear it + Object.entries(newBlocks).forEach(([, block]) => { + if (block.data?.parentId) { + const oldParentId = block.data.parentId + const newParentId = blockIdMap.get(oldParentId) + + if (newParentId) { + block.data = { + ...block.data, + parentId: newParentId, + extent: 'parent', + } + } else { + block.data = { ...block.data, parentId: undefined, extent: undefined } + } + } + }) + const newEdges = edges.map((edge) => ({ ...edge, id: uuidv4(), diff --git a/apps/sim/stores/workflows/workflow/store.ts b/apps/sim/stores/workflows/workflow/store.ts index 41d3051637..36475ba462 100644 --- a/apps/sim/stores/workflows/workflow/store.ts +++ b/apps/sim/stores/workflows/workflow/store.ts @@ -174,6 +174,7 @@ export const useWorkflowStore = create()( ...data, ...(parentId && { parentId, extent: extent || 'parent' }), } + // #endregion const subBlocks: Record = {} const subBlockStore = useSubBlockStore.getState() @@ -295,26 +296,16 @@ export const useWorkflowStore = create()( return } - logger.info('UpdateParentId called:', { - blockId: id, - blockName: block.name, - blockType: block.type, - newParentId: parentId, - extent, - currentParentId: block.data?.parentId, - }) + if (parentId 
=== id) { + logger.error('Blocked attempt to set block as its own parent', { blockId: id }) + return + } - // Skip if the parent ID hasn't changed if (block.data?.parentId === parentId) { - logger.info('Parent ID unchanged, skipping update') return } - // Store current absolute position const absolutePosition = { ...block.position } - - // Handle empty or null parentId (removing from parent) - // On removal, clear the data JSON entirely per normalized DB contract const newData = !parentId ? {} : { @@ -323,8 +314,6 @@ export const useWorkflowStore = create()( extent, } - // For removal we already set data to {}; for setting a parent keep as-is - const newState = { blocks: { ...get().blocks, @@ -339,12 +328,6 @@ export const useWorkflowStore = create()( parallels: { ...get().parallels }, } - logger.info('[WorkflowStore/updateParentId] Updated parentId relationship:', { - blockId: id, - newParentId: parentId || 'None (removed parent)', - keepingPosition: absolutePosition, - }) - set(newState) get().updateLastSaved() // Note: Socket.IO handles real-time sync automatically @@ -586,6 +569,27 @@ export const useWorkflowStore = create()( // Note: Socket.IO handles real-time sync automatically }, + setBlockEnabled: (id: string, enabled: boolean) => { + const block = get().blocks[id] + if (!block || block.enabled === enabled) return + + const newState = { + blocks: { + ...get().blocks, + [id]: { + ...block, + enabled, + }, + }, + edges: [...get().edges], + loops: { ...get().loops }, + parallels: { ...get().parallels }, + } + + set(newState) + get().updateLastSaved() + }, + duplicateBlock: (id: string) => { const block = get().blocks[id] if (!block) return @@ -668,6 +672,26 @@ export const useWorkflowStore = create()( // Note: Socket.IO handles real-time sync automatically }, + setBlockHandles: (id: string, horizontalHandles: boolean) => { + const block = get().blocks[id] + if (!block || block.horizontalHandles === horizontalHandles) return + + const newState = { + blocks: { 
+ ...get().blocks, + [id]: { + ...block, + horizontalHandles, + }, + }, + edges: [...get().edges], + loops: { ...get().loops }, + } + + set(newState) + get().updateLastSaved() + }, + updateBlockName: (id: string, name: string) => { const oldBlock = get().blocks[id] if (!oldBlock) return { success: false, changedSubblocks: [] } diff --git a/apps/sim/stores/workflows/workflow/types.ts b/apps/sim/stores/workflows/workflow/types.ts index c836b8040c..97fcf033a8 100644 --- a/apps/sim/stores/workflows/workflow/types.ts +++ b/apps/sim/stores/workflows/workflow/types.ts @@ -1,5 +1,5 @@ import type { Edge } from 'reactflow' -import type { BlockOutput, SubBlockType } from '@/blocks/types' +import type { OutputFieldDefinition, SubBlockType } from '@/blocks/types' import type { DeploymentStatus } from '@/stores/workflows/registry/types' export const SUBFLOW_TYPES = { @@ -17,14 +17,14 @@ export interface LoopConfig { nodes: string[] iterations: number loopType: 'for' | 'forEach' | 'while' | 'doWhile' - forEachItems?: any[] | Record | string + forEachItems?: unknown[] | Record | string whileCondition?: string // JS expression that evaluates to boolean (for while loops) doWhileCondition?: string // JS expression that evaluates to boolean (for do-while loops) } export interface ParallelConfig { nodes: string[] - distribution?: any[] | Record | string + distribution?: unknown[] | Record | string parallelType?: 'count' | 'collection' } @@ -76,7 +76,7 @@ export interface BlockState { name: string position: Position subBlocks: Record - outputs: Record + outputs: Record enabled: boolean horizontalHandles?: boolean height?: number @@ -137,6 +137,13 @@ export interface Parallel { parallelType?: 'count' | 'collection' // Explicit parallel type to avoid inference bugs } +export interface Variable { + id: string + name: string + type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'plain' + value: unknown +} + export interface DragStartPosition { id: string x: number @@ -156,12 
+163,7 @@ export interface WorkflowState { description?: string exportedAt?: string } - variables?: Array<{ - id: string - name: string - type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'plain' - value: any - }> + variables?: Record isDeployed?: boolean deployedAt?: Date deploymentStatuses?: Record @@ -195,8 +197,10 @@ export interface WorkflowActions { clear: () => Partial updateLastSaved: () => void toggleBlockEnabled: (id: string) => void + setBlockEnabled: (id: string, enabled: boolean) => void duplicateBlock: (id: string) => void toggleBlockHandles: (id: string) => void + setBlockHandles: (id: string, horizontalHandles: boolean) => void updateBlockName: ( id: string, name: string diff --git a/packages/testing/src/factories/index.ts b/packages/testing/src/factories/index.ts index 4a7f456b16..2fafe98625 100644 --- a/packages/testing/src/factories/index.ts +++ b/packages/testing/src/factories/index.ts @@ -120,24 +120,25 @@ export { } from './serialized-block.factory' // Undo/redo operation factories export { - type AddEdgeOperation, type BaseOperation, type BatchAddBlocksOperation, + type BatchAddEdgesOperation, + type BatchMoveBlocksOperation, type BatchRemoveBlocksOperation, + type BatchRemoveEdgesOperation, + type BatchUpdateParentOperation, createAddBlockEntry, createAddEdgeEntry, + createBatchRemoveEdgesEntry, + createBatchUpdateParentEntry, createMoveBlockEntry, createRemoveBlockEntry, - createRemoveEdgeEntry, createUpdateParentEntry, - type MoveBlockOperation, type Operation, type OperationEntry, type OperationType, - type RemoveEdgeOperation, type UpdateParentOperation, } from './undo-redo.factory' -// User/workspace factories export { createUser, createUserWithWorkspace, @@ -147,7 +148,6 @@ export { type WorkflowObjectFactoryOptions, type WorkspaceFactoryOptions, } from './user.factory' -// Workflow factories export { createBranchingWorkflow, createLinearWorkflow, diff --git a/packages/testing/src/factories/undo-redo.factory.ts 
b/packages/testing/src/factories/undo-redo.factory.ts index d03c8cefe6..86c26ef927 100644 --- a/packages/testing/src/factories/undo-redo.factory.ts +++ b/packages/testing/src/factories/undo-redo.factory.ts @@ -8,10 +8,11 @@ import { nanoid } from 'nanoid' export type OperationType = | 'batch-add-blocks' | 'batch-remove-blocks' - | 'add-edge' - | 'remove-edge' - | 'move-block' + | 'batch-add-edges' + | 'batch-remove-edges' + | 'batch-move-blocks' | 'update-parent' + | 'batch-update-parent' /** * Base operation interface. @@ -25,14 +26,16 @@ export interface BaseOperation { } /** - * Move block operation data. + * Batch move blocks operation data. */ -export interface MoveBlockOperation extends BaseOperation { - type: 'move-block' +export interface BatchMoveBlocksOperation extends BaseOperation { + type: 'batch-move-blocks' data: { - blockId: string - before: { x: number; y: number; parentId?: string } - after: { x: number; y: number; parentId?: string } + moves: Array<{ + blockId: string + before: { x: number; y: number; parentId?: string } + after: { x: number; y: number; parentId?: string } + }> } } @@ -61,19 +64,19 @@ export interface BatchRemoveBlocksOperation extends BaseOperation { } /** - * Add edge operation data. + * Batch add edges operation data. */ -export interface AddEdgeOperation extends BaseOperation { - type: 'add-edge' - data: { edgeId: string } +export interface BatchAddEdgesOperation extends BaseOperation { + type: 'batch-add-edges' + data: { edgeSnapshots: any[] } } /** - * Remove edge operation data. + * Batch remove edges operation data. 
*/ -export interface RemoveEdgeOperation extends BaseOperation { - type: 'remove-edge' - data: { edgeId: string; edgeSnapshot: any } +export interface BatchRemoveEdgesOperation extends BaseOperation { + type: 'batch-remove-edges' + data: { edgeSnapshots: any[] } } /** @@ -90,13 +93,28 @@ export interface UpdateParentOperation extends BaseOperation { } } +export interface BatchUpdateParentOperation extends BaseOperation { + type: 'batch-update-parent' + data: { + updates: Array<{ + blockId: string + oldParentId?: string + newParentId?: string + oldPosition: { x: number; y: number } + newPosition: { x: number; y: number } + affectedEdges?: any[] + }> + } +} + export type Operation = | BatchAddBlocksOperation | BatchRemoveBlocksOperation - | AddEdgeOperation - | RemoveEdgeOperation - | MoveBlockOperation + | BatchAddEdgesOperation + | BatchRemoveEdgesOperation + | BatchMoveBlocksOperation | UpdateParentOperation + | BatchUpdateParentOperation /** * Operation entry with forward and inverse operations. @@ -208,40 +226,45 @@ export function createRemoveBlockEntry( } /** - * Creates a mock add-edge operation entry. + * Creates a mock batch-add-edges operation entry for a single edge. 
*/ -export function createAddEdgeEntry(edgeId: string, options: OperationEntryOptions = {}): any { +export function createAddEdgeEntry( + edgeId: string, + edgeSnapshot: any = null, + options: OperationEntryOptions = {} +): any { const { id = nanoid(8), workflowId = 'wf-1', userId = 'user-1', createdAt = Date.now() } = options const timestamp = Date.now() + const snapshot = edgeSnapshot || { id: edgeId, source: 'block-1', target: 'block-2' } + return { id, createdAt, operation: { id: nanoid(8), - type: 'add-edge', + type: 'batch-add-edges', timestamp, workflowId, userId, - data: { edgeId }, + data: { edgeSnapshots: [snapshot] }, }, inverse: { id: nanoid(8), - type: 'remove-edge', + type: 'batch-remove-edges', timestamp, workflowId, userId, - data: { edgeId, edgeSnapshot: null }, + data: { edgeSnapshots: [snapshot] }, }, } } /** - * Creates a mock remove-edge operation entry. + * Creates a mock batch-remove-edges operation entry. */ -export function createRemoveEdgeEntry( - edgeId: string, - edgeSnapshot: any = null, +export function createBatchRemoveEdgesEntry( + edgeSnapshots: any[], options: OperationEntryOptions = {} ): any { const { id = nanoid(8), workflowId = 'wf-1', userId = 'user-1', createdAt = Date.now() } = options @@ -252,19 +275,19 @@ export function createRemoveEdgeEntry( createdAt, operation: { id: nanoid(8), - type: 'remove-edge', + type: 'batch-remove-edges', timestamp, workflowId, userId, - data: { edgeId, edgeSnapshot }, + data: { edgeSnapshots }, }, inverse: { id: nanoid(8), - type: 'add-edge', + type: 'batch-add-edges', timestamp, workflowId, userId, - data: { edgeId }, + data: { edgeSnapshots }, }, } } @@ -275,7 +298,7 @@ interface MoveBlockOptions extends OperationEntryOptions { } /** - * Creates a mock move-block operation entry. + * Creates a mock batch-move-blocks operation entry for a single block. 
*/ export function createMoveBlockEntry(blockId: string, options: MoveBlockOptions = {}): any { const { @@ -293,19 +316,19 @@ export function createMoveBlockEntry(blockId: string, options: MoveBlockOptions createdAt, operation: { id: nanoid(8), - type: 'move-block', + type: 'batch-move-blocks', timestamp, workflowId, userId, - data: { blockId, before, after }, + data: { moves: [{ blockId, before, after }] }, }, inverse: { id: nanoid(8), - type: 'move-block', + type: 'batch-move-blocks', timestamp, workflowId, userId, - data: { blockId, before: after, after: before }, + data: { moves: [{ blockId, before: after, after: before }] }, }, } } @@ -361,3 +384,75 @@ export function createUpdateParentEntry( }, } } + +interface BatchUpdateParentOptions extends OperationEntryOptions { + updates?: Array<{ + blockId: string + oldParentId?: string + newParentId?: string + oldPosition?: { x: number; y: number } + newPosition?: { x: number; y: number } + affectedEdges?: any[] + }> +} + +/** + * Creates a mock batch-update-parent operation entry. 
+ */ +export function createBatchUpdateParentEntry(options: BatchUpdateParentOptions = {}): any { + const { + id = nanoid(8), + workflowId = 'wf-1', + userId = 'user-1', + createdAt = Date.now(), + updates = [ + { + blockId: 'block-1', + oldParentId: undefined, + newParentId: 'loop-1', + oldPosition: { x: 0, y: 0 }, + newPosition: { x: 50, y: 50 }, + }, + ], + } = options + const timestamp = Date.now() + + const processedUpdates = updates.map((u) => ({ + blockId: u.blockId, + oldParentId: u.oldParentId, + newParentId: u.newParentId, + oldPosition: u.oldPosition || { x: 0, y: 0 }, + newPosition: u.newPosition || { x: 50, y: 50 }, + affectedEdges: u.affectedEdges, + })) + + return { + id, + createdAt, + operation: { + id: nanoid(8), + type: 'batch-update-parent', + timestamp, + workflowId, + userId, + data: { updates: processedUpdates }, + }, + inverse: { + id: nanoid(8), + type: 'batch-update-parent', + timestamp, + workflowId, + userId, + data: { + updates: processedUpdates.map((u) => ({ + blockId: u.blockId, + oldParentId: u.newParentId, + newParentId: u.oldParentId, + oldPosition: u.newPosition, + newPosition: u.oldPosition, + affectedEdges: u.affectedEdges, + })), + }, + }, + } +} From 860610b4c2ad1b722ab5496e5269d7e1b2b11868 Mon Sep 17 00:00:00 2001 From: Vikhyath Mondreti Date: Fri, 9 Jan 2026 16:36:45 -0800 Subject: [PATCH 05/16] improvement(billing): team upgrade + session management (#2751) * improvement(billng): team upgrade + session management * remove comments * session updates should be atomic * make consistent for onSubscritionUpdate * plan upgrade to refresh session * fix var name * remove dead code * preserve params --- .../app/_shell/providers/session-provider.tsx | 29 ++++- .../components/subscription/subscription.tsx | 45 ++++--- .../settings-modal/settings-modal.tsx | 1 + apps/sim/lib/auth/auth.ts | 36 +++++- apps/sim/lib/billing/client/upgrade.ts | 5 +- apps/sim/lib/billing/organization.ts | 119 +++++++++++------- 6 files changed, 162 
insertions(+), 73 deletions(-) diff --git a/apps/sim/app/_shell/providers/session-provider.tsx b/apps/sim/app/_shell/providers/session-provider.tsx index 70fe344bd0..29ab636e74 100644 --- a/apps/sim/app/_shell/providers/session-provider.tsx +++ b/apps/sim/app/_shell/providers/session-provider.tsx @@ -2,6 +2,7 @@ import type React from 'react' import { createContext, useCallback, useEffect, useMemo, useState } from 'react' +import { useQueryClient } from '@tanstack/react-query' import posthog from 'posthog-js' import { client } from '@/lib/auth/auth-client' @@ -35,12 +36,15 @@ export function SessionProvider({ children }: { children: React.ReactNode }) { const [data, setData] = useState(null) const [isPending, setIsPending] = useState(true) const [error, setError] = useState(null) + const queryClient = useQueryClient() - const loadSession = useCallback(async () => { + const loadSession = useCallback(async (bypassCache = false) => { try { setIsPending(true) setError(null) - const res = await client.getSession() + const res = bypassCache + ? await client.getSession({ query: { disableCookieCache: true } }) + : await client.getSession() setData(res?.data ?? null) } catch (e) { setError(e instanceof Error ? e : new Error('Failed to fetch session')) @@ -50,8 +54,25 @@ export function SessionProvider({ children }: { children: React.ReactNode }) { }, []) useEffect(() => { - loadSession() - }, [loadSession]) + // Check if user was redirected after plan upgrade + const params = new URLSearchParams(window.location.search) + const wasUpgraded = params.get('upgraded') === 'true' + + if (wasUpgraded) { + params.delete('upgraded') + const newUrl = params.toString() + ? 
`${window.location.pathname}?${params.toString()}` + : window.location.pathname + window.history.replaceState({}, '', newUrl) + } + + loadSession(wasUpgraded).then(() => { + if (wasUpgraded) { + queryClient.invalidateQueries({ queryKey: ['organizations'] }) + queryClient.invalidateQueries({ queryKey: ['subscription'] }) + } + }) + }, [loadSession, queryClient]) useEffect(() => { if (isPending || typeof posthog.identify !== 'function') { diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/subscription/subscription.tsx b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/subscription/subscription.tsx index 5eafe5b90c..c9c7995259 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/subscription/subscription.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/subscription/subscription.tsx @@ -8,6 +8,7 @@ import { Skeleton } from '@/components/ui' import { useSession } from '@/lib/auth/auth-client' import { useSubscriptionUpgrade } from '@/lib/billing/client/upgrade' import { USAGE_THRESHOLDS } from '@/lib/billing/client/usage-visualization' +import { getEffectiveSeats } from '@/lib/billing/subscriptions/utils' import { cn } from '@/lib/core/utils/cn' import { getBaseUrl } from '@/lib/core/utils/urls' import { getUserRole } from '@/lib/workspaces/organization/utils' @@ -191,7 +192,13 @@ export function Subscription() { const [upgradeError, setUpgradeError] = useState<'pro' | 'team' | null>(null) const usageLimitRef = useRef(null) - const isLoading = isSubscriptionLoading || isUsageLimitLoading || isWorkspaceLoading + const isOrgPlan = + subscriptionData?.data?.plan === 'team' || subscriptionData?.data?.plan === 'enterprise' + const isLoading = + isSubscriptionLoading || + isUsageLimitLoading || + isWorkspaceLoading || + (isOrgPlan && isOrgBillingLoading) const 
subscription = { isFree: subscriptionData?.data?.plan === 'free' || !subscriptionData?.data?.plan, @@ -204,7 +211,7 @@ export function Subscription() { subscriptionData?.data?.status === 'active', plan: subscriptionData?.data?.plan || 'free', status: subscriptionData?.data?.status || 'inactive', - seats: organizationBillingData?.totalSeats ?? 0, + seats: getEffectiveSeats(subscriptionData?.data), } const usage = { @@ -445,16 +452,10 @@ export function Subscription() { ? `${subscription.seats} seats` : undefined } - current={ - subscription.isEnterprise || subscription.isTeam - ? (organizationBillingData?.totalCurrentUsage ?? usage.current) - : usage.current - } + current={usage.current} limit={ subscription.isEnterprise || subscription.isTeam - ? organizationBillingData?.totalUsageLimit || - organizationBillingData?.minimumBillingAmount || - usage.limit + ? organizationBillingData?.data?.totalUsageLimit : !subscription.isFree && (permissions.canEditUsageLimit || permissions.showTeamMemberView) ? 
usage.current // placeholder; rightContent will render UsageLimit @@ -468,19 +469,31 @@ export function Subscription() { { logger.info('Usage limit updated') }} diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/settings-modal.tsx b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/settings-modal.tsx index 050f2757cc..bdebb7eeb3 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/settings-modal.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/settings-modal.tsx @@ -174,6 +174,7 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) { const userEmail = session?.user?.email const userId = session?.user?.id + const userRole = getUserRole(activeOrganization, userEmail) const isOwner = userRole === 'owner' const isAdmin = userRole === 'admin' diff --git a/apps/sim/lib/auth/auth.ts b/apps/sim/lib/auth/auth.ts index 43e4c919ad..38d9e80754 100644 --- a/apps/sim/lib/auth/auth.ts +++ b/apps/sim/lib/auth/auth.ts @@ -2184,8 +2184,22 @@ export const auth = betterAuth({ status: subscription.status, }) - const resolvedSubscription = - await ensureOrganizationForTeamSubscription(subscription) + let resolvedSubscription = subscription + try { + resolvedSubscription = await ensureOrganizationForTeamSubscription(subscription) + } catch (orgError) { + logger.error( + '[onSubscriptionComplete] Failed to ensure organization for team subscription', + { + subscriptionId: subscription.id, + referenceId: subscription.referenceId, + plan: subscription.plan, + error: orgError instanceof Error ? orgError.message : String(orgError), + stack: orgError instanceof Error ? 
orgError.stack : undefined, + } + ) + throw orgError + } await handleSubscriptionCreated(resolvedSubscription) @@ -2206,8 +2220,22 @@ export const auth = betterAuth({ plan: subscription.plan, }) - const resolvedSubscription = - await ensureOrganizationForTeamSubscription(subscription) + let resolvedSubscription = subscription + try { + resolvedSubscription = await ensureOrganizationForTeamSubscription(subscription) + } catch (orgError) { + logger.error( + '[onSubscriptionUpdate] Failed to ensure organization for team subscription', + { + subscriptionId: subscription.id, + referenceId: subscription.referenceId, + plan: subscription.plan, + error: orgError instanceof Error ? orgError.message : String(orgError), + stack: orgError instanceof Error ? orgError.stack : undefined, + } + ) + throw orgError + } try { await syncSubscriptionUsageLimits(resolvedSubscription) diff --git a/apps/sim/lib/billing/client/upgrade.ts b/apps/sim/lib/billing/client/upgrade.ts index 953f585a94..acd7e651ce 100644 --- a/apps/sim/lib/billing/client/upgrade.ts +++ b/apps/sim/lib/billing/client/upgrade.ts @@ -81,12 +81,15 @@ export function useSubscriptionUpgrade() { } const currentUrl = `${window.location.origin}${window.location.pathname}` + const successUrlObj = new URL(window.location.href) + successUrlObj.searchParams.set('upgraded', 'true') + const successUrl = successUrlObj.toString() try { const upgradeParams = { plan: targetPlan, referenceId, - successUrl: currentUrl, + successUrl, cancelUrl: currentUrl, ...(targetPlan === 'team' && { seats: CONSTANTS.INITIAL_TEAM_SEATS }), } as const diff --git a/apps/sim/lib/billing/organization.ts b/apps/sim/lib/billing/organization.ts index 579dfbd886..eff6a03c0c 100644 --- a/apps/sim/lib/billing/organization.ts +++ b/apps/sim/lib/billing/organization.ts @@ -1,5 +1,11 @@ import { db } from '@sim/db' -import * as schema from '@sim/db/schema' +import { + member, + organization, + session, + subscription as subscriptionTable, + user, +} from 
'@sim/db/schema' import { createLogger } from '@sim/logger' import { and, eq } from 'drizzle-orm' import { getPlanPricing } from '@/lib/billing/core/billing' @@ -20,16 +26,16 @@ type SubscriptionData = { */ async function getUserOwnedOrganization(userId: string): Promise { const existingMemberships = await db - .select({ organizationId: schema.member.organizationId }) - .from(schema.member) - .where(and(eq(schema.member.userId, userId), eq(schema.member.role, 'owner'))) + .select({ organizationId: member.organizationId }) + .from(member) + .where(and(eq(member.userId, userId), eq(member.role, 'owner'))) .limit(1) if (existingMemberships.length > 0) { const [existingOrg] = await db - .select({ id: schema.organization.id }) - .from(schema.organization) - .where(eq(schema.organization.id, existingMemberships[0].organizationId)) + .select({ id: organization.id }) + .from(organization) + .where(eq(organization.id, existingMemberships[0].organizationId)) .limit(1) return existingOrg?.id || null @@ -40,6 +46,8 @@ async function getUserOwnedOrganization(userId: string): Promise /** * Create a new organization and add user as owner + * Uses transaction to ensure org + member are created atomically + * Also updates user's active sessions to set the new org as active */ async function createOrganizationWithOwner( userId: string, @@ -48,32 +56,40 @@ async function createOrganizationWithOwner( metadata: Record = {} ): Promise { const orgId = `org_${crypto.randomUUID()}` + let sessionsUpdated = 0 - const [newOrg] = await db - .insert(schema.organization) - .values({ + await db.transaction(async (tx) => { + await tx.insert(organization).values({ id: orgId, name: organizationName, slug: organizationSlug, metadata, }) - .returning({ id: schema.organization.id }) - - // Add user as owner/admin of the organization - await db.insert(schema.member).values({ - id: crypto.randomUUID(), - userId: userId, - organizationId: newOrg.id, - role: 'owner', + + await tx.insert(member).values({ + 
id: crypto.randomUUID(), + userId: userId, + organizationId: orgId, + role: 'owner', + }) + + const updatedSessions = await tx + .update(session) + .set({ activeOrganizationId: orgId }) + .where(eq(session.userId, userId)) + .returning({ id: session.id }) + + sessionsUpdated = updatedSessions.length }) logger.info('Created organization with owner', { userId, - organizationId: newOrg.id, + organizationId: orgId, organizationName, + sessionsUpdated, }) - return newOrg.id + return orgId } export async function createOrganizationForTeamPlan( @@ -132,12 +148,12 @@ export async function ensureOrganizationForTeamSubscription( const existingMembership = await db .select({ - id: schema.member.id, - organizationId: schema.member.organizationId, - role: schema.member.role, + id: member.id, + organizationId: member.organizationId, + role: member.role, }) - .from(schema.member) - .where(eq(schema.member.userId, userId)) + .from(member) + .where(eq(member.userId, userId)) .limit(1) if (existingMembership.length > 0) { @@ -148,10 +164,17 @@ export async function ensureOrganizationForTeamSubscription( organizationId: membership.organizationId, }) - await db - .update(schema.subscription) - .set({ referenceId: membership.organizationId }) - .where(eq(schema.subscription.id, subscription.id)) + await db.transaction(async (tx) => { + await tx + .update(subscriptionTable) + .set({ referenceId: membership.organizationId }) + .where(eq(subscriptionTable.id, subscription.id)) + + await tx + .update(session) + .set({ activeOrganizationId: membership.organizationId }) + .where(eq(session.userId, userId)) + }) return { ...subscription, referenceId: membership.organizationId } } @@ -165,9 +188,9 @@ export async function ensureOrganizationForTeamSubscription( } const [userData] = await db - .select({ name: schema.user.name, email: schema.user.email }) - .from(schema.user) - .where(eq(schema.user.id, userId)) + .select({ name: user.name, email: user.email }) + .from(user) + .where(eq(user.id, 
userId)) .limit(1) const orgId = await createOrganizationForTeamPlan( @@ -177,9 +200,9 @@ export async function ensureOrganizationForTeamSubscription( ) await db - .update(schema.subscription) + .update(subscriptionTable) .set({ referenceId: orgId }) - .where(eq(schema.subscription.id, subscription.id)) + .where(eq(subscriptionTable.id, subscription.id)) logger.info('Created organization and updated subscription referenceId', { subscriptionId: subscription.id, @@ -204,9 +227,9 @@ export async function syncSubscriptionUsageLimits(subscription: SubscriptionData // Check if this is a user or organization subscription const users = await db - .select({ id: schema.user.id }) - .from(schema.user) - .where(eq(schema.user.id, subscription.referenceId)) + .select({ id: user.id }) + .from(user) + .where(eq(user.id, subscription.referenceId)) .limit(1) if (users.length > 0) { @@ -230,9 +253,9 @@ export async function syncSubscriptionUsageLimits(subscription: SubscriptionData // Only set if not already set or if updating to a higher value based on seats const orgData = await db - .select({ orgUsageLimit: schema.organization.orgUsageLimit }) - .from(schema.organization) - .where(eq(schema.organization.id, organizationId)) + .select({ orgUsageLimit: organization.orgUsageLimit }) + .from(organization) + .where(eq(organization.id, organizationId)) .limit(1) const currentLimit = @@ -243,12 +266,12 @@ export async function syncSubscriptionUsageLimits(subscription: SubscriptionData // Update if no limit set, or if new seat-based minimum is higher if (currentLimit < orgLimit) { await db - .update(schema.organization) + .update(organization) .set({ orgUsageLimit: orgLimit.toFixed(2), updatedAt: new Date(), }) - .where(eq(schema.organization.id, organizationId)) + .where(eq(organization.id, organizationId)) logger.info('Set organization usage limit for team plan', { organizationId, @@ -262,17 +285,17 @@ export async function syncSubscriptionUsageLimits(subscription: SubscriptionData // 
Sync usage limits for all members const members = await db - .select({ userId: schema.member.userId }) - .from(schema.member) - .where(eq(schema.member.organizationId, organizationId)) + .select({ userId: member.userId }) + .from(member) + .where(eq(member.organizationId, organizationId)) if (members.length > 0) { - for (const member of members) { + for (const m of members) { try { - await syncUsageLimitsFromSubscription(member.userId) + await syncUsageLimitsFromSubscription(m.userId) } catch (memberError) { logger.error('Failed to sync usage limits for organization member', { - userId: member.userId, + userId: m.userId, organizationId, subscriptionId: subscription.id, error: memberError, From d3d6012d5c664a228e9e730e96b4a777bc1e2b3f Mon Sep 17 00:00:00 2001 From: Waleed Date: Fri, 9 Jan 2026 16:53:37 -0800 Subject: [PATCH 06/16] fix(tools): updated memory block to throw better errors, removed deprecated posthog route, remove deprecated templates & console helpers (#2753) * fix(tools): updated memory block to throw better errors, removed deprecated posthog route, remove deprecated templates & console helpers * remove isDeployed in favor of deploymentStatus * ack PR comments --- apps/docs/components/icons.tsx | 19 ++ apps/docs/content/docs/en/tools/grain.mdx | 2 + apps/docs/content/docs/en/tools/linear.mdx | 23 +-- apps/docs/content/docs/en/tools/posthog.mdx | 24 --- apps/docs/content/docs/en/tools/translate.mdx | 3 + .../api/copilot/chat/update-title/route.ts | 50 ----- .../deployments/[version]/revert/route.ts | 3 - .../templates/components/template-card.tsx | 2 - .../templates/components/template-card.tsx | 2 - .../[workspaceId]/templates/templates.tsx | 57 ++---- .../diff-controls/diff-controls.tsx | 5 - .../hooks/use-current-workflow.ts | 6 - .../w/[workflowId]/utils/auto-layout-utils.ts | 1 - .../workspace/providers/socket-provider.tsx | 2 - apps/sim/blocks/blocks/memory.ts | 6 - apps/sim/blocks/blocks/posthog.ts | 68 +------ 
.../handlers/generic/generic-handler.ts | 10 +- apps/sim/hooks/use-collaborative-workflow.ts | 2 - apps/sim/lib/workflows/defaults.ts | 2 - apps/sim/stores/panel/copilot/store.ts | 2 - apps/sim/stores/terminal/console/store.ts | 30 --- apps/sim/stores/terminal/console/types.ts | 1 - apps/sim/stores/workflows/index.ts | 22 +- apps/sim/stores/workflows/registry/store.ts | 40 +--- apps/sim/stores/workflows/workflow/store.ts | 11 - apps/sim/stores/workflows/workflow/types.ts | 2 - apps/sim/tools/memory/add.ts | 48 +---- apps/sim/tools/memory/delete.ts | 28 +-- apps/sim/tools/memory/get.ts | 32 +-- apps/sim/tools/memory/get_all.ts | 15 +- apps/sim/tools/memory/helpers.ts | 25 --- apps/sim/tools/posthog/index.ts | 2 - apps/sim/tools/posthog/list_events.ts | 190 ------------------ apps/sim/tools/registry.ts | 2 - 34 files changed, 71 insertions(+), 666 deletions(-) delete mode 100644 apps/sim/app/api/copilot/chat/update-title/route.ts delete mode 100644 apps/sim/tools/memory/helpers.ts delete mode 100644 apps/sim/tools/posthog/list_events.ts diff --git a/apps/docs/components/icons.tsx b/apps/docs/components/icons.tsx index 192905bead..de0ab92021 100644 --- a/apps/docs/components/icons.tsx +++ b/apps/docs/components/icons.tsx @@ -4575,3 +4575,22 @@ export function FirefliesIcon(props: SVGProps) { ) } + +export function BedrockIcon(props: SVGProps) { + return ( + + + + + + + + + + + ) +} diff --git a/apps/docs/content/docs/en/tools/grain.mdx b/apps/docs/content/docs/en/tools/grain.mdx index cd30c96139..7df544d8ae 100644 --- a/apps/docs/content/docs/en/tools/grain.mdx +++ b/apps/docs/content/docs/en/tools/grain.mdx @@ -162,6 +162,7 @@ Create a webhook to receive recording events | --------- | ---- | -------- | ----------- | | `apiKey` | string | Yes | Grain API key \(Personal Access Token\) | | `hookUrl` | string | Yes | Webhook endpoint URL \(must respond 2xx\) | +| `hookType` | string | Yes | Type of webhook: "recording_added" or "upload_status" | | `filterBeforeDatetime` 
| string | No | Filter: recordings before this date | | `filterAfterDatetime` | string | No | Filter: recordings after this date | | `filterParticipantScope` | string | No | Filter: "internal" or "external" | @@ -178,6 +179,7 @@ Create a webhook to receive recording events | `id` | string | Hook UUID | | `enabled` | boolean | Whether hook is active | | `hook_url` | string | The webhook URL | +| `hook_type` | string | Type of hook: recording_added or upload_status | | `filter` | object | Applied filters | | `include` | object | Included fields | | `inserted_at` | string | ISO8601 creation timestamp | diff --git a/apps/docs/content/docs/en/tools/linear.mdx b/apps/docs/content/docs/en/tools/linear.mdx index bf1814774a..c0d6daa5fd 100644 --- a/apps/docs/content/docs/en/tools/linear.mdx +++ b/apps/docs/content/docs/en/tools/linear.mdx @@ -851,24 +851,6 @@ List all status updates for a project in Linear | --------- | ---- | ----------- | | `updates` | array | Array of project updates | -### `linear_create_project_link` - -Add an external link to a project in Linear - -#### Input - -| Parameter | Type | Required | Description | -| --------- | ---- | -------- | ----------- | -| `projectId` | string | Yes | Project ID to add link to | -| `url` | string | Yes | URL of the external link | -| `label` | string | No | Link label/title | - -#### Output - -| Parameter | Type | Description | -| --------- | ---- | ----------- | -| `link` | object | The created project link | - ### `linear_list_notifications` List notifications for the current user in Linear @@ -1246,7 +1228,6 @@ Create a new project label in Linear | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `projectId` | string | Yes | The project for this label | | `name` | string | Yes | Project label name | | `color` | string | No | Label color \(hex code\) | | `description` | string | No | Label description | @@ -1424,12 +1405,12 @@ Create a new project status in Linear | 
Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `projectId` | string | Yes | The project to create the status for | | `name` | string | Yes | Project status name | +| `type` | string | Yes | Status type: "backlog", "planned", "started", "paused", "completed", or "canceled" | | `color` | string | Yes | Status color \(hex code\) | +| `position` | number | Yes | Position in status list \(e.g. 0, 1, 2...\) | | `description` | string | No | Status description | | `indefinite` | boolean | No | Whether the status is indefinite | -| `position` | number | No | Position in status list | #### Output diff --git a/apps/docs/content/docs/en/tools/posthog.mdx b/apps/docs/content/docs/en/tools/posthog.mdx index bbc27e9e8f..c7acf1fdbe 100644 --- a/apps/docs/content/docs/en/tools/posthog.mdx +++ b/apps/docs/content/docs/en/tools/posthog.mdx @@ -79,30 +79,6 @@ Capture multiple events at once in PostHog. Use this for bulk event ingestion to | `status` | string | Status message indicating whether the batch was captured successfully | | `eventsProcessed` | number | Number of events processed in the batch | -### `posthog_list_events` - -List events in PostHog. Note: This endpoint is deprecated but kept for backwards compatibility. For production use, prefer the Query endpoint with HogQL. 
- -#### Input - -| Parameter | Type | Required | Description | -| --------- | ---- | -------- | ----------- | -| `personalApiKey` | string | Yes | PostHog Personal API Key \(for authenticated API access\) | -| `region` | string | No | PostHog region: us \(default\) or eu | -| `projectId` | string | Yes | PostHog Project ID | -| `limit` | number | No | Number of events to return \(default: 100, max: 100\) | -| `offset` | number | No | Number of events to skip for pagination | -| `event` | string | No | Filter by specific event name | -| `distinctId` | string | No | Filter by specific distinct_id | -| `before` | string | No | ISO 8601 timestamp - only return events before this time | -| `after` | string | No | ISO 8601 timestamp - only return events after this time | - -#### Output - -| Parameter | Type | Description | -| --------- | ---- | ----------- | -| `events` | array | List of events with their properties and metadata | - ### `posthog_list_persons` List persons (users) in PostHog. Returns user profiles with their properties and distinct IDs. 
diff --git a/apps/docs/content/docs/en/tools/translate.mdx b/apps/docs/content/docs/en/tools/translate.mdx index 790cc4d8bc..d28443a91b 100644 --- a/apps/docs/content/docs/en/tools/translate.mdx +++ b/apps/docs/content/docs/en/tools/translate.mdx @@ -53,6 +53,9 @@ Send a chat completion request to any supported LLM provider | `vertexProject` | string | No | Google Cloud project ID for Vertex AI | | `vertexLocation` | string | No | Google Cloud location for Vertex AI \(defaults to us-central1\) | | `vertexCredential` | string | No | Google Cloud OAuth credential ID for Vertex AI | +| `bedrockAccessKeyId` | string | No | AWS Access Key ID for Bedrock | +| `bedrockSecretKey` | string | No | AWS Secret Access Key for Bedrock | +| `bedrockRegion` | string | No | AWS region for Bedrock \(defaults to us-east-1\) | #### Output diff --git a/apps/sim/app/api/copilot/chat/update-title/route.ts b/apps/sim/app/api/copilot/chat/update-title/route.ts deleted file mode 100644 index 7c1451c642..0000000000 --- a/apps/sim/app/api/copilot/chat/update-title/route.ts +++ /dev/null @@ -1,50 +0,0 @@ -/** - * @deprecated This route is not currently in use - * @remarks Kept for reference - may be removed in future cleanup - */ - -import { db } from '@sim/db' -import { copilotChats } from '@sim/db/schema' -import { createLogger } from '@sim/logger' -import { eq } from 'drizzle-orm' -import { type NextRequest, NextResponse } from 'next/server' -import { z } from 'zod' -import { getSession } from '@/lib/auth' - -const logger = createLogger('UpdateChatTitleAPI') - -const UpdateTitleSchema = z.object({ - chatId: z.string(), - title: z.string(), -}) - -export async function POST(request: NextRequest) { - try { - const session = await getSession() - if (!session?.user?.id) { - return NextResponse.json({ success: false, error: 'Unauthorized' }, { status: 401 }) - } - - const body = await request.json() - const parsed = UpdateTitleSchema.parse(body) - - // Update the chat title - await db - 
.update(copilotChats) - .set({ - title: parsed.title, - updatedAt: new Date(), - }) - .where(eq(copilotChats.id, parsed.chatId)) - - logger.info('Chat title updated', { chatId: parsed.chatId, title: parsed.title }) - - return NextResponse.json({ success: true }) - } catch (error) { - logger.error('Error updating chat title:', error) - return NextResponse.json( - { success: false, error: 'Failed to update chat title' }, - { status: 500 } - ) - } -} diff --git a/apps/sim/app/api/workflows/[id]/deployments/[version]/revert/route.ts b/apps/sim/app/api/workflows/[id]/deployments/[version]/revert/route.ts index 5e8f43560f..80eb62fc99 100644 --- a/apps/sim/app/api/workflows/[id]/deployments/[version]/revert/route.ts +++ b/apps/sim/app/api/workflows/[id]/deployments/[version]/revert/route.ts @@ -74,8 +74,6 @@ export async function POST( loops: deployedState.loops || {}, parallels: deployedState.parallels || {}, lastSaved: Date.now(), - isDeployed: true, - deployedAt: new Date(), deploymentStatuses: deployedState.deploymentStatuses || {}, }) @@ -88,7 +86,6 @@ export async function POST( .set({ lastSynced: new Date(), updatedAt: new Date() }) .where(eq(workflow.id, id)) - // Sync MCP tools with the reverted version's parameter schema await syncMcpToolsForWorkflow({ workflowId: id, requestId, diff --git a/apps/sim/app/templates/components/template-card.tsx b/apps/sim/app/templates/components/template-card.tsx index b881f17b6b..3c1c32a414 100644 --- a/apps/sim/app/templates/components/template-card.tsx +++ b/apps/sim/app/templates/components/template-card.tsx @@ -106,8 +106,6 @@ function normalizeWorkflowState(input?: any): WorkflowState | null { lastUpdate: input.lastUpdate, metadata: input.metadata, variables: input.variables, - isDeployed: input.isDeployed, - deployedAt: input.deployedAt, deploymentStatuses: input.deploymentStatuses, needsRedeployment: input.needsRedeployment, dragStartPosition: input.dragStartPosition ?? 
null, diff --git a/apps/sim/app/workspace/[workspaceId]/templates/components/template-card.tsx b/apps/sim/app/workspace/[workspaceId]/templates/components/template-card.tsx index f5c1fd0630..3ea0023623 100644 --- a/apps/sim/app/workspace/[workspaceId]/templates/components/template-card.tsx +++ b/apps/sim/app/workspace/[workspaceId]/templates/components/template-card.tsx @@ -109,8 +109,6 @@ function normalizeWorkflowState(input?: any): WorkflowState | null { lastUpdate: input.lastUpdate, metadata: input.metadata, variables: input.variables, - isDeployed: input.isDeployed, - deployedAt: input.deployedAt, deploymentStatuses: input.deploymentStatuses, needsRedeployment: input.needsRedeployment, dragStartPosition: input.dragStartPosition ?? null, diff --git a/apps/sim/app/workspace/[workspaceId]/templates/templates.tsx b/apps/sim/app/workspace/[workspaceId]/templates/templates.tsx index 43a4bac2fd..884dce6d03 100644 --- a/apps/sim/app/workspace/[workspaceId]/templates/templates.tsx +++ b/apps/sim/app/workspace/[workspaceId]/templates/templates.tsx @@ -13,7 +13,7 @@ import { useDebounce } from '@/hooks/use-debounce' import type { WorkflowState } from '@/stores/workflows/workflow/types' /** - * Template data structure with support for both new and legacy fields + * Template data structure */ export interface Template { /** Unique identifier for the template */ @@ -59,16 +59,6 @@ export interface Template { isStarred: boolean /** Whether the current user is a super user */ isSuperUser?: boolean - /** @deprecated Legacy field - use creator.referenceId instead */ - userId?: string - /** @deprecated Legacy field - use details.tagline instead */ - description?: string | null - /** @deprecated Legacy field - use creator.name instead */ - author?: string - /** @deprecated Legacy field - use creator.referenceType instead */ - authorType?: 'user' | 'organization' - /** @deprecated Legacy field - use creator.referenceId when referenceType is 'organization' */ - organizationId?: 
string | null /** Display color for the template card */ color?: string /** Display icon for the template card */ @@ -107,7 +97,6 @@ export default function Templates({ /** * Filter templates based on active tab and search query - * Memoized to prevent unnecessary recalculations on render */ const filteredTemplates = useMemo(() => { const query = debouncedSearchQuery.toLowerCase() @@ -115,7 +104,7 @@ export default function Templates({ return templates.filter((template) => { const tabMatch = activeTab === 'your' - ? template.userId === currentUserId || template.isStarred + ? template.creator?.referenceId === currentUserId || template.isStarred : activeTab === 'gallery' ? template.status === 'approved' : template.status === 'pending' @@ -124,13 +113,7 @@ export default function Templates({ if (!query) return true - const searchableText = [ - template.name, - template.description, - template.details?.tagline, - template.author, - template.creator?.name, - ] + const searchableText = [template.name, template.details?.tagline, template.creator?.name] .filter(Boolean) .join(' ') .toLowerCase() @@ -141,7 +124,6 @@ export default function Templates({ /** * Get empty state message based on current filters - * Memoized to prevent unnecessary recalculations on render */ const emptyState = useMemo(() => { if (debouncedSearchQuery) { @@ -235,25 +217,20 @@ export default function Templates({
    ) : ( - filteredTemplates.map((template) => { - const author = template.author || template.creator?.name || 'Unknown' - const authorImageUrl = template.creator?.profileImageUrl || null - - return ( - - ) - }) + filteredTemplates.map((template) => ( + + )) )}
    diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/diff-controls/diff-controls.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/diff-controls/diff-controls.tsx index 5c3dbaebf6..5664c769c2 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/diff-controls/diff-controls.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/diff-controls/diff-controls.tsx @@ -109,12 +109,7 @@ export const DiffControls = memo(function DiffControls() { loops: rawState.loops || {}, parallels: rawState.parallels || {}, lastSaved: rawState.lastSaved || Date.now(), - isDeployed: rawState.isDeployed || false, deploymentStatuses: rawState.deploymentStatuses || {}, - // Only include deployedAt if it's a valid date, never include null/undefined - ...(rawState.deployedAt && rawState.deployedAt instanceof Date - ? { deployedAt: rawState.deployedAt } - : {}), } logger.info('Prepared complete workflow state for checkpoint', { diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-current-workflow.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-current-workflow.ts index b73fe534c2..a74573b07b 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-current-workflow.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-current-workflow.ts @@ -16,8 +16,6 @@ export interface CurrentWorkflow { loops: Record parallels: Record lastSaved?: number - isDeployed?: boolean - deployedAt?: Date deploymentStatuses?: Record needsRedeployment?: boolean @@ -50,8 +48,6 @@ export function useCurrentWorkflow(): CurrentWorkflow { loops: state.loops, parallels: state.parallels, lastSaved: state.lastSaved, - isDeployed: state.isDeployed, - deployedAt: state.deployedAt, deploymentStatuses: state.deploymentStatuses, needsRedeployment: state.needsRedeployment, })) @@ -82,8 +78,6 @@ export function useCurrentWorkflow(): CurrentWorkflow { loops: 
activeWorkflow.loops || {}, parallels: activeWorkflow.parallels || {}, lastSaved: activeWorkflow.lastSaved, - isDeployed: activeWorkflow.isDeployed, - deployedAt: activeWorkflow.deployedAt, deploymentStatuses: activeWorkflow.deploymentStatuses, needsRedeployment: activeWorkflow.needsRedeployment, diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/utils/auto-layout-utils.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/utils/auto-layout-utils.ts index 9c19ebf116..e45df1ba65 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/utils/auto-layout-utils.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/utils/auto-layout-utils.ts @@ -117,7 +117,6 @@ export async function applyAutoLayoutAndUpdateStore( const cleanedWorkflowState = { ...stateToSave, - deployedAt: stateToSave.deployedAt ? new Date(stateToSave.deployedAt) : undefined, loops: stateToSave.loops || {}, parallels: stateToSave.parallels || {}, edges: (stateToSave.edges || []).map((edge: any) => { diff --git a/apps/sim/app/workspace/providers/socket-provider.tsx b/apps/sim/app/workspace/providers/socket-provider.tsx index 4d1df147d0..5f6d25cfb9 100644 --- a/apps/sim/app/workspace/providers/socket-provider.tsx +++ b/apps/sim/app/workspace/providers/socket-provider.tsx @@ -369,8 +369,6 @@ export function SocketProvider({ children, user }: SocketProviderProps) { loops: workflowState.loops || {}, parallels: workflowState.parallels || {}, lastSaved: workflowState.lastSaved || Date.now(), - isDeployed: workflowState.isDeployed ?? 
false, - deployedAt: workflowState.deployedAt, deploymentStatuses: workflowState.deploymentStatuses || {}, }) diff --git a/apps/sim/blocks/blocks/memory.ts b/apps/sim/blocks/blocks/memory.ts index e97e8de1e2..d349827b4a 100644 --- a/apps/sim/blocks/blocks/memory.ts +++ b/apps/sim/blocks/blocks/memory.ts @@ -121,12 +121,6 @@ export const MemoryBlock: BlockConfig = { if (!conversationId) { errors.push('Conversation ID is required for add operation') } - if (!params.role) { - errors.push('Role is required for agent memory') - } - if (!params.content) { - errors.push('Content is required for agent memory') - } } if (params.operation === 'get' || params.operation === 'delete') { diff --git a/apps/sim/blocks/blocks/posthog.ts b/apps/sim/blocks/blocks/posthog.ts index 74b6baf62f..fc23c70450 100644 --- a/apps/sim/blocks/blocks/posthog.ts +++ b/apps/sim/blocks/blocks/posthog.ts @@ -23,7 +23,6 @@ export const PostHogBlock: BlockConfig = { // Core Data Operations { label: 'Capture Event', id: 'posthog_capture_event' }, { label: 'Batch Events', id: 'posthog_batch_events' }, - { label: 'List Events', id: 'posthog_list_events' }, { label: 'List Persons', id: 'posthog_list_persons' }, { label: 'Get Person', id: 'posthog_get_person' }, { label: 'Delete Person', id: 'posthog_delete_person' }, @@ -105,7 +104,6 @@ export const PostHogBlock: BlockConfig = { 'posthog_get_event_definition', 'posthog_update_event_definition', // Core Operations (with personalApiKey) - 'posthog_list_events', 'posthog_list_persons', 'posthog_get_person', 'posthog_delete_person', @@ -403,54 +401,6 @@ Return ONLY the JSON array.`, condition: { field: 'operation', value: 'posthog_query' }, }, - // List Events filters - { - id: 'eventFilter', - title: 'Event Name Filter', - type: 'short-input', - placeholder: 'e.g., page_view, button_clicked', - condition: { field: 'operation', value: 'posthog_list_events' }, - }, - { - id: 'before', - title: 'Before (ISO 8601)', - type: 'short-input', - placeholder: 
'2024-01-01T12:00:00Z', - condition: { field: 'operation', value: 'posthog_list_events' }, - wandConfig: { - enabled: true, - prompt: `Generate an ISO 8601 timestamp based on the user's description. -The timestamp should be in the format: YYYY-MM-DDTHH:MM:SSZ (UTC timezone). -Examples: -- "today" -> Today's date at 00:00:00Z -- "this week" -> The start of this week at 00:00:00Z -- "last month" -> The 1st of last month at 00:00:00Z - -Return ONLY the timestamp string - no explanations, no quotes, no extra text.`, - placeholder: 'Describe the cutoff date (e.g., "today", "this week")...', - generationType: 'timestamp', - }, - }, - { - id: 'after', - title: 'After (ISO 8601)', - type: 'short-input', - placeholder: '2024-01-01T00:00:00Z', - condition: { field: 'operation', value: 'posthog_list_events' }, - wandConfig: { - enabled: true, - prompt: `Generate an ISO 8601 timestamp based on the user's description. -The timestamp should be in the format: YYYY-MM-DDTHH:MM:SSZ (UTC timezone). -Examples: -- "yesterday" -> Yesterday's date at 00:00:00Z -- "last week" -> 7 days ago at 00:00:00Z -- "beginning of this month" -> The 1st of this month at 00:00:00Z - -Return ONLY the timestamp string - no explanations, no quotes, no extra text.`, - placeholder: 'Describe the start date (e.g., "yesterday", "last week")...', - generationType: 'timestamp', - }, - }, { id: 'distinctIdFilter', title: 'Distinct ID Filter', @@ -458,7 +408,7 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`, placeholder: 'user123', condition: { field: 'operation', - value: ['posthog_list_events', 'posthog_list_persons'], + value: 'posthog_list_persons', }, }, @@ -1079,7 +1029,6 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`, condition: { field: 'operation', value: [ - 'posthog_list_events', 'posthog_list_persons', 'posthog_list_insights', 'posthog_list_dashboards', @@ -1104,7 +1053,6 @@ Return ONLY the timestamp string - no explanations, no 
quotes, no extra text.`, condition: { field: 'operation', value: [ - 'posthog_list_events', 'posthog_list_persons', 'posthog_list_insights', 'posthog_list_dashboards', @@ -1188,7 +1136,6 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`, // Core Data 'posthog_capture_event', 'posthog_batch_events', - 'posthog_list_events', 'posthog_list_persons', 'posthog_get_person', 'posthog_delete_person', @@ -1297,17 +1244,8 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`, params.tags = params.insightTags } - // Map eventFilter to event for list_events - if (params.operation === 'posthog_list_events' && params.eventFilter) { - params.event = params.eventFilter - } - - // Map distinctIdFilter to distinctId for list operations - if ( - (params.operation === 'posthog_list_events' || - params.operation === 'posthog_list_persons') && - params.distinctIdFilter - ) { + // Map distinctIdFilter to distinctId for list_persons + if (params.operation === 'posthog_list_persons' && params.distinctIdFilter) { params.distinctId = params.distinctIdFilter } diff --git a/apps/sim/executor/handlers/generic/generic-handler.ts b/apps/sim/executor/handlers/generic/generic-handler.ts index 13ed1b52d0..c61baf7959 100644 --- a/apps/sim/executor/handlers/generic/generic-handler.ts +++ b/apps/sim/executor/handlers/generic/generic-handler.ts @@ -34,14 +34,8 @@ export class GenericBlockHandler implements BlockHandler { if (blockType) { const blockConfig = getBlock(blockType) if (blockConfig?.tools?.config?.params) { - try { - const transformedParams = blockConfig.tools.config.params(inputs) - finalInputs = { ...inputs, ...transformedParams } - } catch (error) { - logger.warn(`Failed to apply parameter transformation for block type ${blockType}:`, { - error: error instanceof Error ? 
error.message : String(error), - }) - } + const transformedParams = blockConfig.tools.config.params(inputs) + finalInputs = { ...inputs, ...transformedParams } } if (blockConfig?.inputs) { diff --git a/apps/sim/hooks/use-collaborative-workflow.ts b/apps/sim/hooks/use-collaborative-workflow.ts index ba6fda4e1c..5b46caf03f 100644 --- a/apps/sim/hooks/use-collaborative-workflow.ts +++ b/apps/sim/hooks/use-collaborative-workflow.ts @@ -615,8 +615,6 @@ export function useCollaborativeWorkflow() { edges: workflowData.state.edges || [], loops: workflowData.state.loops || {}, parallels: workflowData.state.parallels || {}, - isDeployed: workflowData.state.isDeployed || false, - deployedAt: workflowData.state.deployedAt, lastSaved: workflowData.state.lastSaved || Date.now(), deploymentStatuses: workflowData.state.deploymentStatuses || {}, }) diff --git a/apps/sim/lib/workflows/defaults.ts b/apps/sim/lib/workflows/defaults.ts index cfb0a20bbf..590594aa53 100644 --- a/apps/sim/lib/workflows/defaults.ts +++ b/apps/sim/lib/workflows/defaults.ts @@ -119,8 +119,6 @@ export function buildDefaultWorkflowArtifacts(): DefaultWorkflowArtifacts { loops: {}, parallels: {}, lastSaved: Date.now(), - isDeployed: false, - deployedAt: undefined, deploymentStatuses: {}, needsRedeployment: false, } diff --git a/apps/sim/stores/panel/copilot/store.ts b/apps/sim/stores/panel/copilot/store.ts index 8a5a634af2..d00ca84c7a 100644 --- a/apps/sim/stores/panel/copilot/store.ts +++ b/apps/sim/stores/panel/copilot/store.ts @@ -2483,8 +2483,6 @@ export const useCopilotStore = create()( loops: reverted.loops || {}, parallels: reverted.parallels || {}, lastSaved: reverted.lastSaved || Date.now(), - isDeployed: !!reverted.isDeployed, - ...(reverted.deployedAt ? 
{ deployedAt: new Date(reverted.deployedAt) } : {}), deploymentStatuses: reverted.deploymentStatuses || {}, }) diff --git a/apps/sim/stores/terminal/console/store.ts b/apps/sim/stores/terminal/console/store.ts index 45b0ae0bca..66da2b5089 100644 --- a/apps/sim/stores/terminal/console/store.ts +++ b/apps/sim/stores/terminal/console/store.ts @@ -35,7 +35,6 @@ const isStreamingOutput = (output: any): boolean => { return false } - // Check for streaming indicators return ( output.isStreaming === true || ('executionData' in output && @@ -53,12 +52,10 @@ const shouldSkipEntry = (output: any): boolean => { return false } - // Skip raw streaming objects with both stream and executionData if ('stream' in output && 'executionData' in output) { return true } - // Skip raw StreamingExecution objects if ('stream' in output && 'execution' in output) { return true } @@ -75,12 +72,10 @@ export const useTerminalConsoleStore = create()( addConsole: (entry: Omit) => { set((state) => { - // Skip duplicate streaming entries if (shouldSkipEntry(entry.output)) { return { entries: state.entries } } - // Redact API keys from output and input const redactedEntry = { ...entry } if ( !isStreamingOutput(entry.output) && @@ -93,7 +88,6 @@ export const useTerminalConsoleStore = create()( redactedEntry.input = redactApiKeys(redactedEntry.input) } - // Create new entry with ID and timestamp const newEntry: ConsoleEntry = { ...redactedEntry, id: crypto.randomUUID(), @@ -105,8 +99,6 @@ export const useTerminalConsoleStore = create()( const newEntry = get().entries[0] - // Surface error notifications immediately when error entries are added - // Only show if error notifications are enabled in settings if (newEntry?.error) { const { isErrorNotificationsEnabled } = useGeneralStore.getState() @@ -115,7 +107,6 @@ export const useTerminalConsoleStore = create()( const errorMessage = String(newEntry.error) const blockName = newEntry.blockName || 'Unknown Block' - // Copilot message includes block name for 
better debugging context const copilotMessage = `${errorMessage}\n\nError in ${blockName}.\n\nPlease fix this.` useNotificationStore.getState().addNotification({ @@ -147,22 +138,6 @@ export const useTerminalConsoleStore = create()( set((state) => ({ entries: state.entries.filter((entry) => entry.workflowId !== workflowId), })) - // Clear run path indicators when console is cleared - useExecutionStore.getState().clearRunPath() - }, - - /** - * Clears all console entries or entries for a specific workflow and clears the run path - * @param workflowId - The workflow ID to clear entries for, or null to clear all - * @deprecated Use clearWorkflowConsole for clearing specific workflows - */ - clearConsole: (workflowId: string | null) => { - set((state) => ({ - entries: workflowId - ? state.entries.filter((entry) => entry.workflowId !== workflowId) - : [], - })) - // Clear run path indicators when console is cleared useExecutionStore.getState().clearRunPath() }, @@ -183,7 +158,6 @@ export const useTerminalConsoleStore = create()( let stringValue = typeof value === 'object' ? 
JSON.stringify(value) : String(value) - // Escape quotes and wrap in quotes if contains special characters if ( stringValue.includes('"') || stringValue.includes(',') || @@ -232,7 +206,6 @@ export const useTerminalConsoleStore = create()( const timestamp = new Date().toISOString().replace(/[:.]/g, '-').slice(0, 19) const filename = `terminal-console-${workflowId}-${timestamp}.csv` - // Create and trigger download const blob = new Blob([csvContent], { type: 'text/csv;charset=utf-8;' }) const link = document.createElement('a') @@ -259,18 +232,15 @@ export const useTerminalConsoleStore = create()( updateConsole: (blockId: string, update: string | ConsoleUpdate, executionId?: string) => { set((state) => { const updatedEntries = state.entries.map((entry) => { - // Only update if both blockId and executionId match if (entry.blockId !== blockId || entry.executionId !== executionId) { return entry } - // Handle simple string update if (typeof update === 'string') { const newOutput = updateBlockOutput(entry.output, update) return { ...entry, output: newOutput } } - // Handle complex update const updatedEntry = { ...entry } if (update.content !== undefined) { diff --git a/apps/sim/stores/terminal/console/types.ts b/apps/sim/stores/terminal/console/types.ts index 450f946e3e..416575fa38 100644 --- a/apps/sim/stores/terminal/console/types.ts +++ b/apps/sim/stores/terminal/console/types.ts @@ -48,7 +48,6 @@ export interface ConsoleStore { isOpen: boolean addConsole: (entry: Omit) => ConsoleEntry clearWorkflowConsole: (workflowId: string) => void - clearConsole: (workflowId: string | null) => void exportConsoleCSV: (workflowId: string) => void getWorkflowEntries: (workflowId: string) => ConsoleEntry[] toggleConsole: () => void diff --git a/apps/sim/stores/workflows/index.ts b/apps/sim/stores/workflows/index.ts index 305f8cbbd4..c3cc04ec6f 100644 --- a/apps/sim/stores/workflows/index.ts +++ b/apps/sim/stores/workflows/index.ts @@ -34,13 +34,7 @@ export function 
getWorkflowWithValues(workflowId: string) { const deploymentStatus = useWorkflowRegistry.getState().getWorkflowDeploymentStatus(workflowId) // Use the current state from the store (only available for active workflow) - const workflowState: WorkflowState = { - // Use the main store's method to get the base workflow state - ...useWorkflowStore.getState().getWorkflowState(), - // Override deployment fields with registry-specific deployment status - isDeployed: deploymentStatus?.isDeployed || false, - deployedAt: deploymentStatus?.deployedAt, - } + const workflowState: WorkflowState = useWorkflowStore.getState().getWorkflowState() // Merge the subblock values for this specific workflow const mergedBlocks = mergeSubblockState(workflowState.blocks, workflowId) @@ -58,8 +52,9 @@ export function getWorkflowWithValues(workflowId: string) { loops: workflowState.loops, parallels: workflowState.parallels, lastSaved: workflowState.lastSaved, - isDeployed: workflowState.isDeployed, - deployedAt: workflowState.deployedAt, + // Get deployment fields from registry for API compatibility + isDeployed: deploymentStatus?.isDeployed || false, + deployedAt: deploymentStatus?.deployedAt, }, } } @@ -101,7 +96,6 @@ export function getAllWorkflowsWithValues() { // Ensure state has all required fields for Zod validation const workflowState: WorkflowState = { - // Use the main store's method to get the base workflow state with fallback values ...useWorkflowStore.getState().getWorkflowState(), // Ensure fallback values for safer handling blocks: currentState.blocks || {}, @@ -109,9 +103,6 @@ export function getAllWorkflowsWithValues() { loops: currentState.loops || {}, parallels: currentState.parallels || {}, lastSaved: currentState.lastSaved || Date.now(), - // Override deployment fields with registry-specific deployment status - isDeployed: deploymentStatus?.isDeployed || false, - deployedAt: deploymentStatus?.deployedAt, } // Merge the subblock values for this specific workflow @@ -132,8 
+123,9 @@ export function getAllWorkflowsWithValues() { loops: workflowState.loops, parallels: workflowState.parallels, lastSaved: workflowState.lastSaved, - isDeployed: workflowState.isDeployed, - deployedAt: workflowState.deployedAt, + // Get deployment fields from registry for API compatibility + isDeployed: deploymentStatus?.isDeployed || false, + deployedAt: deploymentStatus?.deployedAt, }, // Include API key if available apiKey, diff --git a/apps/sim/stores/workflows/registry/store.ts b/apps/sim/stores/workflows/registry/store.ts index 3c1c03987e..6ba54ec1fa 100644 --- a/apps/sim/stores/workflows/registry/store.ts +++ b/apps/sim/stores/workflows/registry/store.ts @@ -40,9 +40,7 @@ function resetWorkflowStores() { edges: [], loops: {}, parallels: {}, - isDeployed: false, - deployedAt: undefined, - deploymentStatuses: {}, // Reset deployment statuses map + deploymentStatuses: {}, lastSaved: Date.now(), }) @@ -227,31 +225,6 @@ export const useWorkflowRegistry = create()( }, }, })) - - // Also update the workflow store if this is the active workflow - const { activeWorkflowId } = get() - if (workflowId === activeWorkflowId) { - // Update the workflow store for backward compatibility - useWorkflowStore.setState((state) => ({ - isDeployed, - deployedAt: deployedAt || (isDeployed ? new Date() : undefined), - needsRedeployment: isDeployed ? false : state.needsRedeployment, - deploymentStatuses: { - ...state.deploymentStatuses, - [workflowId as string]: { - isDeployed, - deployedAt: deployedAt || (isDeployed ? new Date() : undefined), - apiKey, - needsRedeployment: isDeployed - ? false - : ((state.deploymentStatuses?.[workflowId as string] as any)?.needsRedeployment ?? 
- false), - }, - }, - })) - } - - // Note: Socket.IO handles real-time sync automatically }, // Method to set the needsRedeployment flag for a specific workflow @@ -322,9 +295,6 @@ export const useWorkflowRegistry = create()( edges: workflowData.state.edges || [], loops: workflowData.state.loops || {}, parallels: workflowData.state.parallels || {}, - isDeployed: workflowData.isDeployed || false, - deployedAt: workflowData.deployedAt ? new Date(workflowData.deployedAt) : undefined, - apiKey: workflowData.apiKey, lastSaved: Date.now(), deploymentStatuses: {}, } @@ -334,8 +304,6 @@ export const useWorkflowRegistry = create()( edges: [], loops: {}, parallels: {}, - isDeployed: false, - deployedAt: undefined, deploymentStatuses: {}, lastSaved: Date.now(), } @@ -543,8 +511,6 @@ export const useWorkflowRegistry = create()( edges: sourceState.edges, loops: sourceState.loops, parallels: sourceState.parallels, - isDeployed: false, - deployedAt: undefined, workspaceId, deploymentStatuses: {}, lastSaved: Date.now(), @@ -622,8 +588,6 @@ export const useWorkflowRegistry = create()( edges: [...useWorkflowStore.getState().edges], loops: { ...useWorkflowStore.getState().loops }, parallels: { ...useWorkflowStore.getState().parallels }, - isDeployed: useWorkflowStore.getState().isDeployed, - deployedAt: useWorkflowStore.getState().deployedAt, lastSaved: useWorkflowStore.getState().lastSaved, } : null, @@ -646,8 +610,6 @@ export const useWorkflowRegistry = create()( edges: [], loops: {}, parallels: {}, - isDeployed: false, - deployedAt: undefined, lastSaved: Date.now(), }) diff --git a/apps/sim/stores/workflows/workflow/store.ts b/apps/sim/stores/workflows/workflow/store.ts index 36475ba462..d59c4cfc9b 100644 --- a/apps/sim/stores/workflows/workflow/store.ts +++ b/apps/sim/stores/workflows/workflow/store.ts @@ -97,10 +97,6 @@ const initialState = { loops: {}, parallels: {}, lastSaved: undefined, - // Legacy deployment fields (keeping for compatibility but they will be deprecated) - 
isDeployed: false, - deployedAt: undefined, - // New field for per-workflow deployment tracking deploymentStatuses: {}, needsRedeployment: false, } @@ -174,7 +170,6 @@ export const useWorkflowStore = create()( ...data, ...(parentId && { parentId, extent: extent || 'parent' }), } - // #endregion const subBlocks: Record = {} const subBlockStore = useSubBlockStore.getState() @@ -506,8 +501,6 @@ export const useWorkflowStore = create()( loops: state.loops, parallels: state.parallels, lastSaved: state.lastSaved, - isDeployed: state.isDeployed, - deployedAt: state.deployedAt, deploymentStatuses: state.deploymentStatuses, needsRedeployment: state.needsRedeployment, } @@ -534,9 +527,6 @@ export const useWorkflowStore = create()( edges: nextEdges, loops: nextLoops, parallels: nextParallels, - isDeployed: - workflowState.isDeployed !== undefined ? workflowState.isDeployed : state.isDeployed, - deployedAt: workflowState.deployedAt ?? state.deployedAt, deploymentStatuses: workflowState.deploymentStatuses || state.deploymentStatuses, needsRedeployment: workflowState.needsRedeployment !== undefined @@ -1043,7 +1033,6 @@ export const useWorkflowStore = create()( edges: deployedState.edges, loops: deployedState.loops || {}, parallels: deployedState.parallels || {}, - isDeployed: true, needsRedeployment: false, // Keep existing deployment statuses and update for the active workflow if needed deploymentStatuses: { diff --git a/apps/sim/stores/workflows/workflow/types.ts b/apps/sim/stores/workflows/workflow/types.ts index 97fcf033a8..2488c5e43c 100644 --- a/apps/sim/stores/workflows/workflow/types.ts +++ b/apps/sim/stores/workflows/workflow/types.ts @@ -164,8 +164,6 @@ export interface WorkflowState { exportedAt?: string } variables?: Record - isDeployed?: boolean - deployedAt?: Date deploymentStatuses?: Record needsRedeployment?: boolean dragStartPosition?: DragStartPosition | null diff --git a/apps/sim/tools/memory/add.ts b/apps/sim/tools/memory/add.ts index 296275517e..b550d8d99d 
100644 --- a/apps/sim/tools/memory/add.ts +++ b/apps/sim/tools/memory/add.ts @@ -1,4 +1,3 @@ -import { buildMemoryKey } from '@/tools/memory/helpers' import type { MemoryResponse } from '@/tools/memory/types' import type { ToolConfig } from '@/tools/types' @@ -24,11 +23,13 @@ export const memoryAddTool: ToolConfig = { role: { type: 'string', required: true, + visibility: 'user-or-llm', description: 'Role for agent memory (user, assistant, or system)', }, content: { type: 'string', required: true, + visibility: 'user-or-llm', description: 'Content for agent memory', }, }, @@ -41,52 +42,15 @@ export const memoryAddTool: ToolConfig = { }), body: (params) => { const workspaceId = params._context?.workspaceId - if (!workspaceId) { - return { - _errorResponse: { - status: 400, - data: { - success: false, - error: { - message: 'workspaceId is required and must be provided in execution context', - }, - }, - }, - } + throw new Error('workspaceId is required in execution context') } const conversationId = params.conversationId || params.id - - if (!conversationId || conversationId.trim() === '') { - return { - _errorResponse: { - status: 400, - data: { - success: false, - error: { - message: 'conversationId or id is required', - }, - }, - }, - } + if (!conversationId) { + throw new Error('conversationId or id is required') } - - if (!params.role || !params.content) { - return { - _errorResponse: { - status: 400, - data: { - success: false, - error: { - message: 'Role and content are required for agent memory', - }, - }, - }, - } - } - - const key = buildMemoryKey(conversationId) + const key = conversationId const body: Record = { key, diff --git a/apps/sim/tools/memory/delete.ts b/apps/sim/tools/memory/delete.ts index 80c0da3adc..aa5a577e19 100644 --- a/apps/sim/tools/memory/delete.ts +++ b/apps/sim/tools/memory/delete.ts @@ -23,37 +23,15 @@ export const memoryDeleteTool: ToolConfig = { }, request: { - url: (params): any => { + url: (params) => { const workspaceId = 
params._context?.workspaceId - if (!workspaceId) { - return { - _errorResponse: { - status: 400, - data: { - success: false, - error: { - message: 'workspaceId is required and must be provided in execution context', - }, - }, - }, - } + throw new Error('workspaceId is required in execution context') } const conversationId = params.conversationId || params.id - if (!conversationId) { - return { - _errorResponse: { - status: 400, - data: { - success: false, - error: { - message: 'conversationId or id must be provided', - }, - }, - }, - } + throw new Error('conversationId or id is required') } const url = new URL('/api/memory', 'http://dummy') diff --git a/apps/sim/tools/memory/get.ts b/apps/sim/tools/memory/get.ts index e93b58f152..21830a42ec 100644 --- a/apps/sim/tools/memory/get.ts +++ b/apps/sim/tools/memory/get.ts @@ -1,4 +1,3 @@ -import { buildMemoryKey } from '@/tools/memory/helpers' import type { MemoryResponse } from '@/tools/memory/types' import type { ToolConfig } from '@/tools/types' @@ -24,40 +23,17 @@ export const memoryGetTool: ToolConfig = { }, request: { - url: (params): any => { + url: (params) => { const workspaceId = params._context?.workspaceId - if (!workspaceId) { - return { - _errorResponse: { - status: 400, - data: { - success: false, - error: { - message: 'workspaceId is required and must be provided in execution context', - }, - }, - }, - } + throw new Error('workspaceId is required in execution context') } const conversationId = params.conversationId || params.id - if (!conversationId) { - return { - _errorResponse: { - status: 400, - data: { - success: false, - error: { - message: 'conversationId or id must be provided', - }, - }, - }, - } + throw new Error('conversationId or id is required') } - - const query = buildMemoryKey(conversationId) + const query = conversationId const url = new URL('/api/memory', 'http://dummy') url.searchParams.set('workspaceId', workspaceId) diff --git a/apps/sim/tools/memory/get_all.ts 
b/apps/sim/tools/memory/get_all.ts index b204b0d103..7d32eff180 100644 --- a/apps/sim/tools/memory/get_all.ts +++ b/apps/sim/tools/memory/get_all.ts @@ -10,21 +10,10 @@ export const memoryGetAllTool: ToolConfig = { params: {}, request: { - url: (params): any => { + url: (params) => { const workspaceId = params._context?.workspaceId - if (!workspaceId) { - return { - _errorResponse: { - status: 400, - data: { - success: false, - error: { - message: 'workspaceId is required and must be provided in execution context', - }, - }, - }, - } + throw new Error('workspaceId is required in execution context') } return `/api/memory?workspaceId=${encodeURIComponent(workspaceId)}` diff --git a/apps/sim/tools/memory/helpers.ts b/apps/sim/tools/memory/helpers.ts deleted file mode 100644 index 5fdb23583e..0000000000 --- a/apps/sim/tools/memory/helpers.ts +++ /dev/null @@ -1,25 +0,0 @@ -/** - * Parse memory key to extract conversationId - * Memory is now thread-scoped, so the key is just the conversationId - * @param key The memory key (conversationId) - * @returns Object with conversationId, or null if invalid - */ -export function parseMemoryKey(key: string): { conversationId: string } | null { - if (!key) { - return null - } - - return { - conversationId: key, - } -} - -/** - * Build memory key from conversationId - * Memory is thread-scoped, so key is just the conversationId - * @param conversationId The conversation ID - * @returns The memory key (same as conversationId) - */ -export function buildMemoryKey(conversationId: string): string { - return conversationId -} diff --git a/apps/sim/tools/posthog/index.ts b/apps/sim/tools/posthog/index.ts index ab8dfdd3bd..7868ecab55 100644 --- a/apps/sim/tools/posthog/index.ts +++ b/apps/sim/tools/posthog/index.ts @@ -29,7 +29,6 @@ import { listCohortsTool } from '@/tools/posthog/list_cohorts' import { listDashboardsTool } from '@/tools/posthog/list_dashboards' // Data Management import { listEventDefinitionsTool } from 
'@/tools/posthog/list_event_definitions' -import { listEventsTool } from '@/tools/posthog/list_events' import { listExperimentsTool } from '@/tools/posthog/list_experiments' // Feature Management import { listFeatureFlagsTool } from '@/tools/posthog/list_feature_flags' @@ -53,7 +52,6 @@ import { updateSurveyTool } from '@/tools/posthog/update_survey' // Export all tools with posthog prefix export const posthogCaptureEventTool = captureEventTool export const posthogBatchEventsTool = batchEventsTool -export const posthogListEventsTool = listEventsTool export const posthogListPersonsTool = listPersonsTool export const posthogGetPersonTool = getPersonTool export const posthogDeletePersonTool = deletePersonTool diff --git a/apps/sim/tools/posthog/list_events.ts b/apps/sim/tools/posthog/list_events.ts deleted file mode 100644 index d79730ad5a..0000000000 --- a/apps/sim/tools/posthog/list_events.ts +++ /dev/null @@ -1,190 +0,0 @@ -import type { ToolConfig } from '@/tools/types' - -export interface PostHogListEventsParams { - personalApiKey: string - region?: 'us' | 'eu' - projectId: string - limit?: number - offset?: number - event?: string - distinctId?: string - before?: string - after?: string -} - -export interface PostHogEvent { - id: string - event: string - distinct_id: string - properties: Record - timestamp: string - person?: { - id: string - distinct_ids: string[] - properties: Record - } -} - -export interface PostHogListEventsResponse { - success: boolean - output: { - events: PostHogEvent[] - next?: string - } -} - -export const listEventsTool: ToolConfig = { - id: 'posthog_list_events', - name: 'PostHog List Events', - description: - 'List events in PostHog. Note: This endpoint is deprecated but kept for backwards compatibility. 
For production use, prefer the Query endpoint with HogQL.', - version: '1.0.0', - - params: { - personalApiKey: { - type: 'string', - required: true, - visibility: 'user-only', - description: 'PostHog Personal API Key (for authenticated API access)', - }, - region: { - type: 'string', - required: false, - visibility: 'user-only', - description: 'PostHog region: us (default) or eu', - default: 'us', - }, - projectId: { - type: 'string', - required: true, - visibility: 'user-only', - description: 'PostHog Project ID', - }, - limit: { - type: 'number', - required: false, - visibility: 'user-only', - description: 'Number of events to return (default: 100, max: 100)', - default: 100, - }, - offset: { - type: 'number', - required: false, - visibility: 'user-only', - description: 'Number of events to skip for pagination', - }, - event: { - type: 'string', - required: false, - visibility: 'user-or-llm', - description: 'Filter by specific event name', - }, - distinctId: { - type: 'string', - required: false, - visibility: 'user-or-llm', - description: 'Filter by specific distinct_id', - }, - before: { - type: 'string', - required: false, - visibility: 'user-or-llm', - description: 'ISO 8601 timestamp - only return events before this time', - }, - after: { - type: 'string', - required: false, - visibility: 'user-or-llm', - description: 'ISO 8601 timestamp - only return events after this time', - }, - }, - - request: { - url: (params) => { - const baseUrl = params.region === 'eu' ? 
'https://eu.posthog.com' : 'https://us.posthog.com' - const url = new URL(`${baseUrl}/api/projects/${params.projectId}/events/`) - - if (params.limit) url.searchParams.append('limit', params.limit.toString()) - if (params.offset) url.searchParams.append('offset', params.offset.toString()) - if (params.event) url.searchParams.append('event', params.event) - if (params.distinctId) url.searchParams.append('distinct_id', params.distinctId) - if (params.before) url.searchParams.append('before', params.before) - if (params.after) url.searchParams.append('after', params.after) - - return url.toString() - }, - method: 'GET', - headers: (params) => ({ - Authorization: `Bearer ${params.personalApiKey}`, - 'Content-Type': 'application/json', - }), - }, - - transformResponse: async (response: Response) => { - if (!response.ok) { - const error = await response.text() - return { - success: false, - output: { - events: [], - }, - error: error || 'Failed to list events', - } - } - - const data = await response.json() - - return { - success: true, - output: { - events: - data.results?.map((event: any) => ({ - id: event.id, - event: event.event, - distinct_id: event.distinct_id, - properties: event.properties || {}, - timestamp: event.timestamp, - person: event.person - ? 
{ - id: event.person.id, - distinct_ids: event.person.distinct_ids || [], - properties: event.person.properties || {}, - } - : undefined, - })) || [], - next: data.next || undefined, - }, - } - }, - - outputs: { - events: { - type: 'array', - description: 'List of events with their properties and metadata', - items: { - type: 'object', - properties: { - id: { type: 'string', description: 'Unique event ID' }, - event: { type: 'string', description: 'Event name' }, - distinct_id: { type: 'string', description: 'User or device identifier' }, - properties: { type: 'object', description: 'Event properties' }, - timestamp: { type: 'string', description: 'When the event occurred' }, - person: { - type: 'object', - description: 'Associated person data', - properties: { - id: { type: 'string', description: 'Person ID' }, - distinct_ids: { type: 'array', description: 'All distinct IDs for this person' }, - properties: { type: 'object', description: 'Person properties' }, - }, - }, - }, - }, - }, - next: { - type: 'string', - description: 'URL for the next page of results (if available)', - optional: true, - }, - }, -} diff --git a/apps/sim/tools/registry.ts b/apps/sim/tools/registry.ts index 6d8cb9ec2a..dae22573d8 100644 --- a/apps/sim/tools/registry.ts +++ b/apps/sim/tools/registry.ts @@ -891,7 +891,6 @@ import { posthogListCohortsTool, posthogListDashboardsTool, posthogListEventDefinitionsTool, - posthogListEventsTool, posthogListExperimentsTool, posthogListFeatureFlagsTool, posthogListInsightsTool, @@ -1903,7 +1902,6 @@ export const tools: Record = { perplexity_search: perplexitySearchTool, posthog_capture_event: posthogCaptureEventTool, posthog_batch_events: posthogBatchEventsTool, - posthog_list_events: posthogListEventsTool, posthog_list_persons: posthogListPersonsTool, posthog_get_person: posthogGetPersonTool, posthog_delete_person: posthogDeletePersonTool, From 796f73ee01facff3335b928a23987dc0271cfe28 Mon Sep 17 00:00:00 2001 From: Adam Gough 
<77861281+aadamgough@users.noreply.github.com> Date: Fri, 9 Jan 2026 16:56:07 -0800 Subject: [PATCH 07/16] improvement(google-drive) (#2752) * expanded metadata fields for google drive * added tag dropdown support * fixed greptile * added utils func * removed comments * updated docs * greptile comments * fixed output schema * reverted back to bas64 string --- .../content/docs/en/tools/google_drive.mdx | 17 +- apps/sim/tools/google_drive/create_folder.ts | 119 +++++++- apps/sim/tools/google_drive/download.ts | 164 ++++++++++- apps/sim/tools/google_drive/get_content.ts | 172 +++++++++-- apps/sim/tools/google_drive/list.ts | 112 +++++-- apps/sim/tools/google_drive/types.ts | 275 +++++++++++++++++- apps/sim/tools/google_drive/upload.ts | 94 +++++- apps/sim/tools/google_drive/utils.ts | 95 ++++++ 8 files changed, 949 insertions(+), 99 deletions(-) diff --git a/apps/docs/content/docs/en/tools/google_drive.mdx b/apps/docs/content/docs/en/tools/google_drive.mdx index 27de721e6e..54b3ed3bc3 100644 --- a/apps/docs/content/docs/en/tools/google_drive.mdx +++ b/apps/docs/content/docs/en/tools/google_drive.mdx @@ -48,7 +48,7 @@ Integrate Google Drive into the workflow. Can create, upload, and list files. 
### `google_drive_upload` -Upload a file to Google Drive +Upload a file to Google Drive with complete metadata returned #### Input @@ -65,11 +65,11 @@ Upload a file to Google Drive | Parameter | Type | Description | | --------- | ---- | ----------- | -| `file` | json | Uploaded file metadata including ID, name, and links | +| `file` | object | Complete uploaded file metadata from Google Drive | ### `google_drive_create_folder` -Create a new folder in Google Drive +Create a new folder in Google Drive with complete metadata returned #### Input @@ -83,11 +83,11 @@ Create a new folder in Google Drive | Parameter | Type | Description | | --------- | ---- | ----------- | -| `file` | json | Created folder metadata including ID, name, and parent information | +| `file` | object | Complete created folder metadata from Google Drive | ### `google_drive_download` -Download a file from Google Drive (exports Google Workspace files automatically) +Download a file from Google Drive with complete metadata (exports Google Workspace files automatically) #### Input @@ -96,16 +96,17 @@ Download a file from Google Drive (exports Google Workspace files automatically) | `fileId` | string | Yes | The ID of the file to download | | `mimeType` | string | No | The MIME type to export Google Workspace files to \(optional\) | | `fileName` | string | No | Optional filename override | +| `includeRevisions` | boolean | No | Whether to include revision history in the metadata \(default: true\) | #### Output | Parameter | Type | Description | | --------- | ---- | ----------- | -| `file` | file | Downloaded file stored in execution files | +| `file` | object | Downloaded file stored in execution files | ### `google_drive_list` -List files and folders in Google Drive +List files and folders in Google Drive with complete metadata #### Input @@ -121,7 +122,7 @@ List files and folders in Google Drive | Parameter | Type | Description | | --------- | ---- | ----------- | -| `files` | json | Array of file 
metadata objects from the specified folder | +| `files` | array | Array of file metadata objects from Google Drive | diff --git a/apps/sim/tools/google_drive/create_folder.ts b/apps/sim/tools/google_drive/create_folder.ts index 6fb03f4b4f..6e04de4804 100644 --- a/apps/sim/tools/google_drive/create_folder.ts +++ b/apps/sim/tools/google_drive/create_folder.ts @@ -1,10 +1,14 @@ +import { createLogger } from '@sim/logger' import type { GoogleDriveToolParams, GoogleDriveUploadResponse } from '@/tools/google_drive/types' +import { ALL_FILE_FIELDS } from '@/tools/google_drive/utils' import type { ToolConfig } from '@/tools/types' +const logger = createLogger('GoogleDriveCreateFolderTool') + export const createFolderTool: ToolConfig = { id: 'google_drive_create_folder', name: 'Create Folder in Google Drive', - description: 'Create a new folder in Google Drive', + description: 'Create a new folder in Google Drive with complete metadata returned', version: '1.0', oauth: { @@ -66,35 +70,120 @@ export const createFolderTool: ToolConfig { + transformResponse: async (response: Response, params?: GoogleDriveToolParams) => { if (!response.ok) { const data = await response.json().catch(() => ({})) + logger.error('Failed to create folder in Google Drive', { + status: response.status, + statusText: response.statusText, + error: data, + }) throw new Error(data.error?.message || 'Failed to create folder in Google Drive') } + const data = await response.json() + const folderId = data.id + const authHeader = `Bearer ${params?.accessToken || ''}` + + // Fetch complete folder metadata with all fields + const metadataResponse = await fetch( + `https://www.googleapis.com/drive/v3/files/${folderId}?supportsAllDrives=true&fields=${ALL_FILE_FIELDS}`, + { + headers: { + Authorization: authHeader, + }, + } + ) + + if (!metadataResponse.ok) { + logger.warn('Failed to fetch complete metadata, returning basic response', { + status: metadataResponse.status, + statusText: metadataResponse.statusText, 
+ }) + // Return basic response if metadata fetch fails + return { + success: true, + output: { + file: data, + }, + } + } + + const fullMetadata = await metadataResponse.json() + + logger.info('Folder created successfully', { + folderId: fullMetadata.id, + name: fullMetadata.name, + mimeType: fullMetadata.mimeType, + hasOwners: !!fullMetadata.owners?.length, + hasPermissions: !!fullMetadata.permissions?.length, + }) return { success: true, output: { - file: { - id: data.id, - name: data.name, - mimeType: data.mimeType, - webViewLink: data.webViewLink, - webContentLink: data.webContentLink, - size: data.size, - createdTime: data.createdTime, - modifiedTime: data.modifiedTime, - parents: data.parents, - }, + file: fullMetadata, }, } }, outputs: { file: { - type: 'json', - description: 'Created folder metadata including ID, name, and parent information', + type: 'object', + description: 'Complete created folder metadata from Google Drive', + properties: { + // Basic Info + id: { type: 'string', description: 'Google Drive folder ID' }, + name: { type: 'string', description: 'Folder name' }, + mimeType: { type: 'string', description: 'MIME type (application/vnd.google-apps.folder)' }, + kind: { type: 'string', description: 'Resource type identifier' }, + description: { type: 'string', description: 'Folder description' }, + // Ownership & Sharing + owners: { type: 'json', description: 'List of folder owners' }, + permissions: { type: 'json', description: 'Folder permissions' }, + permissionIds: { type: 'json', description: 'Permission IDs' }, + shared: { type: 'boolean', description: 'Whether folder is shared' }, + ownedByMe: { type: 'boolean', description: 'Whether owned by current user' }, + writersCanShare: { type: 'boolean', description: 'Whether writers can share' }, + viewersCanCopyContent: { type: 'boolean', description: 'Whether viewers can copy' }, + copyRequiresWriterPermission: { + type: 'boolean', + description: 'Whether copy requires writer permission', + 
}, + sharingUser: { type: 'json', description: 'User who shared the folder' }, + // Labels/Tags + starred: { type: 'boolean', description: 'Whether folder is starred' }, + trashed: { type: 'boolean', description: 'Whether folder is in trash' }, + explicitlyTrashed: { type: 'boolean', description: 'Whether explicitly trashed' }, + properties: { type: 'json', description: 'Custom properties' }, + appProperties: { type: 'json', description: 'App-specific properties' }, + folderColorRgb: { type: 'string', description: 'Folder color' }, + // Timestamps + createdTime: { type: 'string', description: 'Folder creation time' }, + modifiedTime: { type: 'string', description: 'Last modification time' }, + modifiedByMeTime: { type: 'string', description: 'When modified by current user' }, + viewedByMeTime: { type: 'string', description: 'When last viewed by current user' }, + sharedWithMeTime: { type: 'string', description: 'When shared with current user' }, + // User Info + lastModifyingUser: { type: 'json', description: 'User who last modified the folder' }, + viewedByMe: { type: 'boolean', description: 'Whether viewed by current user' }, + modifiedByMe: { type: 'boolean', description: 'Whether modified by current user' }, + // Links + webViewLink: { type: 'string', description: 'URL to view in browser' }, + iconLink: { type: 'string', description: 'URL to folder icon' }, + // Hierarchy & Location + parents: { type: 'json', description: 'Parent folder IDs' }, + spaces: { type: 'json', description: 'Spaces containing folder' }, + driveId: { type: 'string', description: 'Shared drive ID' }, + // Capabilities + capabilities: { type: 'json', description: 'User capabilities on folder' }, + // Versions + version: { type: 'string', description: 'Version number' }, + // Other + isAppAuthorized: { type: 'boolean', description: 'Whether created by requesting app' }, + contentRestrictions: { type: 'json', description: 'Content restrictions' }, + linkShareMetadata: { type: 'json', 
description: 'Link share metadata' }, + }, }, }, } diff --git a/apps/sim/tools/google_drive/download.ts b/apps/sim/tools/google_drive/download.ts index c01d1a0475..db58ac3d88 100644 --- a/apps/sim/tools/google_drive/download.ts +++ b/apps/sim/tools/google_drive/download.ts @@ -1,6 +1,16 @@ import { createLogger } from '@sim/logger' -import type { GoogleDriveDownloadResponse, GoogleDriveToolParams } from '@/tools/google_drive/types' -import { DEFAULT_EXPORT_FORMATS, GOOGLE_WORKSPACE_MIME_TYPES } from '@/tools/google_drive/utils' +import type { + GoogleDriveDownloadResponse, + GoogleDriveFile, + GoogleDriveRevision, + GoogleDriveToolParams, +} from '@/tools/google_drive/types' +import { + ALL_FILE_FIELDS, + ALL_REVISION_FIELDS, + DEFAULT_EXPORT_FORMATS, + GOOGLE_WORKSPACE_MIME_TYPES, +} from '@/tools/google_drive/utils' import type { ToolConfig } from '@/tools/types' const logger = createLogger('GoogleDriveDownloadTool') @@ -8,7 +18,8 @@ const logger = createLogger('GoogleDriveDownloadTool') export const downloadTool: ToolConfig = { id: 'google_drive_download', name: 'Download File from Google Drive', - description: 'Download a file from Google Drive (exports Google Workspace files automatically)', + description: + 'Download a file from Google Drive with complete metadata (exports Google Workspace files automatically)', version: '1.0', oauth: { @@ -41,11 +52,18 @@ export const downloadTool: ToolConfig - `https://www.googleapis.com/drive/v3/files/${params.fileId}?fields=id,name,mimeType&supportsAllDrives=true`, + `https://www.googleapis.com/drive/v3/files/${params.fileId}?fields=${ALL_FILE_FIELDS}&supportsAllDrives=true`, method: 'GET', headers: (params) => ({ Authorization: `Bearer ${params.accessToken}`, @@ -64,7 +82,7 @@ export const downloadTool: ToolConfig - `https://www.googleapis.com/drive/v3/files/${params.fileId}?fields=id,name,mimeType&supportsAllDrives=true`, + 
`https://www.googleapis.com/drive/v3/files/${params.fileId}?fields=${ALL_FILE_FIELDS}&supportsAllDrives=true`, method: 'GET', headers: (params) => ({ Authorization: `Bearer ${params.accessToken}`, @@ -61,7 +75,7 @@ export const getContentTool: ToolConfig = { id: 'google_drive_list', name: 'List Google Drive Files', - description: 'List files and folders in Google Drive', + description: 'List files and folders in Google Drive with complete metadata', version: '1.0', oauth: { @@ -55,20 +56,22 @@ export const listTool: ToolConfig { const url = new URL('https://www.googleapis.com/drive/v3/files') - url.searchParams.append( - 'fields', - 'files(id,name,mimeType,webViewLink,webContentLink,size,createdTime,modifiedTime,parents),nextPageToken' - ) + url.searchParams.append('fields', `files(${ALL_FILE_FIELDS}),nextPageToken`) // Ensure shared drives support - corpora=allDrives is critical for searching across shared drives url.searchParams.append('corpora', 'allDrives') url.searchParams.append('supportsAllDrives', 'true') url.searchParams.append('includeItemsFromAllDrives', 'true') + // Helper to escape single quotes for Google Drive query syntax + const escapeQueryValue = (value: string): string => + value.replace(/\\/g, '\\\\').replace(/'/g, "\\'") + // Build the query conditions const conditions = ['trashed = false'] // Always exclude trashed files const folderId = params.folderId || params.folderSelector if (folderId) { - conditions.push(`'${folderId}' in parents`) + const escapedFolderId = escapeQueryValue(folderId) + conditions.push(`'${escapedFolderId}' in parents`) } // Combine all conditions with AND @@ -76,7 +79,8 @@ export const listTool: ToolConfig ({ - id: file.id, - name: file.name, - mimeType: file.mimeType, - webViewLink: file.webViewLink, - webContentLink: file.webContentLink, - size: file.size, - createdTime: file.createdTime, - modifiedTime: file.modifiedTime, - parents: file.parents, - })), + files: data.files, nextPageToken: data.nextPageToken, }, } @@ 
-122,8 +116,86 @@ export const listTool: ToolConfig +} + +// Label/tag information +export interface GoogleDriveLabel { + id?: string + revisionId?: string + kind?: string + fields?: Record< + string, + { + kind?: string + id?: string + valueType?: string + dateString?: string[] + integer?: string[] + selection?: string[] + text?: string[] + user?: GoogleDriveUser[] + } + > +} + +// Content hints for indexing +export interface GoogleDriveContentHints { + indexableText?: string + thumbnail?: { + image?: string + mimeType?: string + } +} + +// Image-specific metadata +export interface GoogleDriveImageMediaMetadata { + width?: number + height?: number + rotation?: number + time?: string + cameraMake?: string + cameraModel?: string + exposureTime?: number + aperture?: number + flashUsed?: boolean + focalLength?: number + isoSpeed?: number + meteringMode?: string + sensor?: string + exposureMode?: string + colorSpace?: string + whiteBalance?: string + exposureBias?: number + maxApertureValue?: number + subjectDistance?: number + lens?: string + location?: { + latitude?: number + longitude?: number + altitude?: number + } +} + +// Video-specific metadata +export interface GoogleDriveVideoMediaMetadata { + width?: number + height?: number + durationMillis?: string +} + +// Shortcut details +export interface GoogleDriveShortcutDetails { + targetId?: string + targetMimeType?: string + targetResourceKey?: string +} + +// Content restrictions +export interface GoogleDriveContentRestriction { + readOnly?: boolean + reason?: string + type?: string + restrictingUser?: GoogleDriveUser + restrictionTime?: string + ownerRestricted?: boolean + systemRestricted?: boolean +} + +// Link share metadata +export interface GoogleDriveLinkShareMetadata { + securityUpdateEligible?: boolean + securityUpdateEnabled?: boolean +} + +// Capabilities - what the current user can do with the file +export interface GoogleDriveCapabilities { + canAcceptOwnership?: boolean + canAddChildren?: boolean + 
canAddFolderFromAnotherDrive?: boolean + canAddMyDriveParent?: boolean + canChangeCopyRequiresWriterPermission?: boolean + canChangeSecurityUpdateEnabled?: boolean + canChangeViewersCanCopyContent?: boolean + canComment?: boolean + canCopy?: boolean + canDelete?: boolean + canDeleteChildren?: boolean + canDownload?: boolean + canEdit?: boolean + canListChildren?: boolean + canModifyContent?: boolean + canModifyContentRestriction?: boolean + canModifyEditorContentRestriction?: boolean + canModifyLabels?: boolean + canModifyOwnerContentRestriction?: boolean + canMoveChildrenOutOfDrive?: boolean + canMoveChildrenOutOfTeamDrive?: boolean + canMoveChildrenWithinDrive?: boolean + canMoveChildrenWithinTeamDrive?: boolean + canMoveItemIntoTeamDrive?: boolean + canMoveItemOutOfDrive?: boolean + canMoveItemOutOfTeamDrive?: boolean + canMoveItemWithinDrive?: boolean + canMoveItemWithinTeamDrive?: boolean + canMoveTeamDriveItem?: boolean + canReadDrive?: boolean + canReadLabels?: boolean + canReadRevisions?: boolean + canReadTeamDrive?: boolean + canRemoveChildren?: boolean + canRemoveContentRestriction?: boolean + canRemoveMyDriveParent?: boolean + canRename?: boolean + canShare?: boolean + canTrash?: boolean + canTrashChildren?: boolean + canUntrash?: boolean +} + +// Revision information +export interface GoogleDriveRevision { + id?: string + mimeType?: string + modifiedTime?: string + keepForever?: boolean + published?: boolean + publishAuto?: boolean + publishedLink?: string + publishedOutsideDomain?: boolean + lastModifyingUser?: GoogleDriveUser + originalFilename?: string + md5Checksum?: string + size?: string + exportLinks?: Record + kind?: string +} + +// Complete file metadata - all 50+ fields from Google Drive API v3 export interface GoogleDriveFile { + // Basic Info id: string name: string mimeType: string + kind?: string + description?: string + originalFilename?: string + fullFileExtension?: string + fileExtension?: string + + // Ownership & Sharing + owners?: 
GoogleDriveUser[] + permissions?: GoogleDrivePermission[] + permissionIds?: string[] + shared?: boolean + ownedByMe?: boolean + writersCanShare?: boolean + viewersCanCopyContent?: boolean + copyRequiresWriterPermission?: boolean + sharingUser?: GoogleDriveUser + + // Labels/Tags + labels?: GoogleDriveLabel[] + labelInfo?: { + labels?: GoogleDriveLabel[] + } + starred?: boolean + trashed?: boolean + explicitlyTrashed?: boolean + properties?: Record + appProperties?: Record + folderColorRgb?: string + + // Timestamps + createdTime?: string + modifiedTime?: string + modifiedByMeTime?: string + viewedByMeTime?: string + sharedWithMeTime?: string + trashedTime?: string + + // User Info + lastModifyingUser?: GoogleDriveUser + trashingUser?: GoogleDriveUser + viewedByMe?: boolean + modifiedByMe?: boolean + + // Links webViewLink?: string webContentLink?: string + iconLink?: string + thumbnailLink?: string + exportLinks?: Record + + // Size & Storage size?: string - createdTime?: string - modifiedTime?: string + quotaBytesUsed?: string + + // Checksums + md5Checksum?: string + sha1Checksum?: string + sha256Checksum?: string + + // Hierarchy & Location parents?: string[] + spaces?: string[] + driveId?: string + teamDriveId?: string + + // Capabilities + capabilities?: GoogleDriveCapabilities + + // Versions + version?: string + headRevisionId?: string + + // Media Metadata + hasThumbnail?: boolean + thumbnailVersion?: string + imageMediaMetadata?: GoogleDriveImageMediaMetadata + videoMediaMetadata?: GoogleDriveVideoMediaMetadata + contentHints?: GoogleDriveContentHints + + // Other + isAppAuthorized?: boolean + contentRestrictions?: GoogleDriveContentRestriction[] + resourceKey?: string + shortcutDetails?: GoogleDriveShortcutDetails + linkShareMetadata?: GoogleDriveLinkShareMetadata + + // Revisions (fetched separately but included in response) + revisions?: GoogleDriveRevision[] } export interface GoogleDriveListResponse extends ToolResponse { @@ -37,9 +304,10 @@ export 
interface GoogleDriveDownloadResponse extends ToolResponse { file: { name: string mimeType: string - data: Buffer + data: string size: number } + metadata: GoogleDriveFile } } @@ -56,6 +324,7 @@ export interface GoogleDriveToolParams { pageSize?: number pageToken?: string exportMimeType?: string + includeRevisions?: boolean } export type GoogleDriveResponse = diff --git a/apps/sim/tools/google_drive/upload.ts b/apps/sim/tools/google_drive/upload.ts index 5d8f0c6747..df321fe9f0 100644 --- a/apps/sim/tools/google_drive/upload.ts +++ b/apps/sim/tools/google_drive/upload.ts @@ -1,6 +1,7 @@ import { createLogger } from '@sim/logger' import type { GoogleDriveToolParams, GoogleDriveUploadResponse } from '@/tools/google_drive/types' import { + ALL_FILE_FIELDS, GOOGLE_WORKSPACE_MIME_TYPES, handleSheetsFormat, SOURCE_MIME_TYPES, @@ -12,7 +13,7 @@ const logger = createLogger('GoogleDriveUploadTool') export const uploadTool: ToolConfig = { id: 'google_drive_upload', name: 'Upload to Google Drive', - description: 'Upload a file to Google Drive', + description: 'Upload a file to Google Drive with complete metadata returned', version: '1.0', oauth: { @@ -228,8 +229,9 @@ export const uploadTool: ToolConfig Date: Fri, 9 Jan 2026 17:12:58 -0800 Subject: [PATCH 08/16] fix(tools): fixed workflow tool for agent to respect user provided params, inject at runtime like all other tools (#2750) * fix(tools): fixed wrokflow tool for agent to respect user provided params, inject at runtime like all other tools * ack comments * remove redunant if-else * added tests --- apps/sim/app/api/help/route.ts | 15 +- .../components/help-modal/help-modal.tsx | 16 +- .../w/components/sidebar/sidebar.tsx | 7 +- apps/sim/providers/utils.test.ts | 240 +++++++++++++++++ apps/sim/providers/utils.ts | 24 +- apps/sim/tools/params.test.ts | 248 +++++++++++++++++- apps/sim/tools/params.ts | 122 +++++++-- 7 files changed, 630 insertions(+), 42 deletions(-) diff --git a/apps/sim/app/api/help/route.ts 
b/apps/sim/app/api/help/route.ts index 676397d2d2..f874c6304b 100644 --- a/apps/sim/app/api/help/route.ts +++ b/apps/sim/app/api/help/route.ts @@ -21,7 +21,6 @@ export async function POST(req: NextRequest) { const requestId = generateRequestId() try { - // Get user session const session = await getSession() if (!session?.user?.email) { logger.warn(`[${requestId}] Unauthorized help request attempt`) @@ -30,20 +29,20 @@ export async function POST(req: NextRequest) { const email = session.user.email - // Handle multipart form data const formData = await req.formData() - // Extract form fields const subject = formData.get('subject') as string const message = formData.get('message') as string const type = formData.get('type') as string + const workflowId = formData.get('workflowId') as string | null + const workspaceId = formData.get('workspaceId') as string + const userAgent = formData.get('userAgent') as string | null logger.info(`[${requestId}] Processing help request`, { type, email: `${email.substring(0, 3)}***`, // Log partial email for privacy }) - // Validate the form data const validationResult = helpFormSchema.safeParse({ subject, message, @@ -60,7 +59,6 @@ export async function POST(req: NextRequest) { ) } - // Extract images const images: { filename: string; content: Buffer; contentType: string }[] = [] for (const [key, value] of formData.entries()) { @@ -81,10 +79,14 @@ export async function POST(req: NextRequest) { logger.debug(`[${requestId}] Help request includes ${images.length} images`) - // Prepare email content + const userId = session.user.id let emailText = ` Type: ${type} From: ${email} +User ID: ${userId} +Workspace ID: ${workspaceId ?? 'N/A'} +Workflow ID: ${workflowId ?? 'N/A'} +Browser: ${userAgent ?? 
'N/A'} ${message} ` @@ -115,7 +117,6 @@ ${message} logger.info(`[${requestId}] Help request email sent successfully`) - // Send confirmation email to the user try { const confirmationHtml = await renderHelpConfirmationEmail( type as 'bug' | 'feedback' | 'feature_request' | 'other', diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/help-modal/help-modal.tsx b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/help-modal/help-modal.tsx index e8d82c61c8..a29e542f36 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/help-modal/help-modal.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/help-modal/help-modal.tsx @@ -57,9 +57,11 @@ interface ImageWithPreview extends File { interface HelpModalProps { open: boolean onOpenChange: (open: boolean) => void + workflowId?: string + workspaceId: string } -export function HelpModal({ open, onOpenChange }: HelpModalProps) { +export function HelpModal({ open, onOpenChange, workflowId, workspaceId }: HelpModalProps) { const fileInputRef = useRef(null) const scrollContainerRef = useRef(null) @@ -370,18 +372,20 @@ export function HelpModal({ open, onOpenChange }: HelpModalProps) { setSubmitStatus(null) try { - // Prepare form data with images const formData = new FormData() formData.append('subject', data.subject) formData.append('message', data.message) formData.append('type', data.type) + formData.append('workspaceId', workspaceId) + formData.append('userAgent', navigator.userAgent) + if (workflowId) { + formData.append('workflowId', workflowId) + } - // Attach all images to form data images.forEach((image, index) => { formData.append(`image_${index}`, image) }) - // Submit to API const response = await fetch('/api/help', { method: 'POST', body: formData, @@ -392,11 +396,9 @@ export function HelpModal({ open, onOpenChange }: HelpModalProps) { throw new Error(errorData.error || 'Failed to submit help request') } - // Handle 
success setSubmitStatus('success') reset() - // Clean up resources images.forEach((image) => URL.revokeObjectURL(image.preview)) setImages([]) } catch (error) { @@ -406,7 +408,7 @@ export function HelpModal({ open, onOpenChange }: HelpModalProps) { setIsSubmitting(false) } }, - [images, reset] + [images, reset, workflowId, workspaceId] ) /** diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/sidebar.tsx b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/sidebar.tsx index 13fd4aa42f..09c3f75a36 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/sidebar.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/sidebar.tsx @@ -661,7 +661,12 @@ export function Sidebar() { /> {/* Footer Navigation Modals */} - + (open ? openSettingsModal() : closeSettingsModal())} diff --git a/apps/sim/providers/utils.test.ts b/apps/sim/providers/utils.test.ts index 1a0fedf40a..d8c08430fb 100644 --- a/apps/sim/providers/utils.test.ts +++ b/apps/sim/providers/utils.test.ts @@ -25,6 +25,7 @@ import { MODELS_WITH_TEMPERATURE_SUPPORT, MODELS_WITH_VERBOSITY, PROVIDERS_WITH_TOOL_USAGE_CONTROL, + prepareToolExecution, prepareToolsWithUsageControl, shouldBillModelUsage, supportsTemperature, @@ -979,6 +980,245 @@ describe('Tool Management', () => { }) }) +describe('prepareToolExecution', () => { + describe('basic parameter merging', () => { + it.concurrent('should merge LLM args with user params', () => { + const tool = { + params: { apiKey: 'user-key', channel: '#general' }, + } + const llmArgs = { message: 'Hello world', channel: '#random' } + const request = { workflowId: 'wf-123' } + + const { toolParams } = prepareToolExecution(tool, llmArgs, request) + + expect(toolParams.apiKey).toBe('user-key') + expect(toolParams.channel).toBe('#general') // User value wins + expect(toolParams.message).toBe('Hello world') + }) + + it.concurrent('should filter out empty string user params', () => { + const tool = { + params: { apiKey: 'user-key', 
channel: '' }, // Empty channel + } + const llmArgs = { message: 'Hello', channel: '#llm-channel' } + const request = {} + + const { toolParams } = prepareToolExecution(tool, llmArgs, request) + + expect(toolParams.apiKey).toBe('user-key') + expect(toolParams.channel).toBe('#llm-channel') // LLM value used since user is empty + expect(toolParams.message).toBe('Hello') + }) + }) + + describe('inputMapping deep merge for workflow tools', () => { + it.concurrent('should deep merge inputMapping when user provides empty object', () => { + const tool = { + params: { + workflowId: 'child-workflow-123', + inputMapping: '{}', // Empty JSON string from UI + }, + } + const llmArgs = { + inputMapping: { query: 'search term', limit: 10 }, + } + const request = { workflowId: 'parent-workflow' } + + const { toolParams } = prepareToolExecution(tool, llmArgs, request) + + // LLM values should be used since user object is empty + expect(toolParams.inputMapping).toEqual({ query: 'search term', limit: 10 }) + expect(toolParams.workflowId).toBe('child-workflow-123') + }) + + it.concurrent('should deep merge inputMapping with partial user values', () => { + const tool = { + params: { + workflowId: 'child-workflow', + inputMapping: '{"query": "", "customField": "user-value"}', // Partial values + }, + } + const llmArgs = { + inputMapping: { query: 'llm-search', limit: 10 }, + } + const request = {} + + const { toolParams } = prepareToolExecution(tool, llmArgs, request) + + // LLM fills empty query, user's customField preserved, LLM's limit included + expect(toolParams.inputMapping).toEqual({ + query: 'llm-search', + limit: 10, + customField: 'user-value', + }) + }) + + it.concurrent('should preserve non-empty user inputMapping values', () => { + const tool = { + params: { + workflowId: 'child-workflow', + inputMapping: '{"query": "user-search", "limit": 5}', + }, + } + const llmArgs = { + inputMapping: { query: 'llm-search', limit: 10, extra: 'field' }, + } + const request = {} + + const 
{ toolParams } = prepareToolExecution(tool, llmArgs, request) + + // User values win, but LLM's extra field is included + expect(toolParams.inputMapping).toEqual({ + query: 'user-search', + limit: 5, + extra: 'field', + }) + }) + + it.concurrent('should handle inputMapping as object (not JSON string)', () => { + const tool = { + params: { + workflowId: 'child-workflow', + inputMapping: { query: '', customField: 'user-value' }, // Object, not string + }, + } + const llmArgs = { + inputMapping: { query: 'llm-search', limit: 10 }, + } + const request = {} + + const { toolParams } = prepareToolExecution(tool, llmArgs, request) + + expect(toolParams.inputMapping).toEqual({ + query: 'llm-search', + limit: 10, + customField: 'user-value', + }) + }) + + it.concurrent('should use LLM inputMapping when user does not provide it', () => { + const tool = { + params: { workflowId: 'child-workflow' }, // No inputMapping + } + const llmArgs = { + inputMapping: { query: 'llm-search', limit: 10 }, + } + const request = {} + + const { toolParams } = prepareToolExecution(tool, llmArgs, request) + + expect(toolParams.inputMapping).toEqual({ query: 'llm-search', limit: 10 }) + }) + + it.concurrent('should use user inputMapping when LLM does not provide it', () => { + const tool = { + params: { + workflowId: 'child-workflow', + inputMapping: '{"query": "user-search"}', + }, + } + const llmArgs = {} // No inputMapping from LLM + const request = {} + + const { toolParams } = prepareToolExecution(tool, llmArgs, request) + + expect(toolParams.inputMapping).toEqual({ query: 'user-search' }) + }) + + it.concurrent('should handle invalid JSON in user inputMapping gracefully', () => { + const tool = { + params: { + workflowId: 'child-workflow', + inputMapping: 'not valid json {', + }, + } + const llmArgs = { + inputMapping: { query: 'llm-search' }, + } + const request = {} + + const { toolParams } = prepareToolExecution(tool, llmArgs, request) + + // Should use LLM values since user JSON is 
invalid + expect(toolParams.inputMapping).toEqual({ query: 'llm-search' }) + }) + + it.concurrent('should not affect other parameters - normal override behavior', () => { + const tool = { + params: { apiKey: 'user-key', channel: '#general' }, + } + const llmArgs = { message: 'Hello', channel: '#random' } + const request = {} + + const { toolParams } = prepareToolExecution(tool, llmArgs, request) + + // Normal behavior: user values override LLM values + expect(toolParams.apiKey).toBe('user-key') + expect(toolParams.channel).toBe('#general') // User value wins + expect(toolParams.message).toBe('Hello') + }) + + it.concurrent('should preserve 0 and false as valid user values in inputMapping', () => { + const tool = { + params: { + workflowId: 'child-workflow', + inputMapping: '{"limit": 0, "enabled": false, "query": ""}', + }, + } + const llmArgs = { + inputMapping: { limit: 10, enabled: true, query: 'llm-search' }, + } + const request = {} + + const { toolParams } = prepareToolExecution(tool, llmArgs, request) + + // 0 and false should be preserved (they're valid values) + // empty string should be filled by LLM + expect(toolParams.inputMapping).toEqual({ + limit: 0, + enabled: false, + query: 'llm-search', + }) + }) + }) + + describe('execution params context', () => { + it.concurrent('should include workflow context in executionParams', () => { + const tool = { params: { message: 'test' } } + const llmArgs = {} + const request = { + workflowId: 'wf-123', + workspaceId: 'ws-456', + chatId: 'chat-789', + userId: 'user-abc', + } + + const { executionParams } = prepareToolExecution(tool, llmArgs, request) + + expect(executionParams._context).toEqual({ + workflowId: 'wf-123', + workspaceId: 'ws-456', + chatId: 'chat-789', + userId: 'user-abc', + }) + }) + + it.concurrent('should include environment and workflow variables', () => { + const tool = { params: {} } + const llmArgs = {} + const request = { + environmentVariables: { API_KEY: 'secret' }, + workflowVariables: { 
counter: 42 }, + } + + const { executionParams } = prepareToolExecution(tool, llmArgs, request) + + expect(executionParams.envVars).toEqual({ API_KEY: 'secret' }) + expect(executionParams.workflowVariables).toEqual({ counter: 42 }) + }) + }) +}) + describe('Provider/Model Blacklist', () => { describe('isProviderBlacklisted', () => { it.concurrent('should return false when no providers are blacklisted', () => { diff --git a/apps/sim/providers/utils.ts b/apps/sim/providers/utils.ts index 1826bb40cd..73d08735d1 100644 --- a/apps/sim/providers/utils.ts +++ b/apps/sim/providers/utils.ts @@ -30,6 +30,7 @@ import { import type { ProviderId, ProviderToolConfig } from '@/providers/types' import { useCustomToolsStore } from '@/stores/custom-tools/store' import { useProvidersStore } from '@/stores/providers/store' +import { deepMergeInputMapping } from '@/tools/params' const logger = createLogger('ProviderUtils') @@ -973,7 +974,7 @@ export function prepareToolExecution( llmArgs: Record, request: { workflowId?: string - workspaceId?: string // Add workspaceId for MCP tools + workspaceId?: string chatId?: string userId?: string environmentVariables?: Record @@ -994,9 +995,24 @@ export function prepareToolExecution( } } - const toolParams = { - ...llmArgs, - ...filteredUserParams, + // Start with LLM params as base + const toolParams: Record = { ...llmArgs } + + // Apply user params with special handling for inputMapping + for (const [key, userValue] of Object.entries(filteredUserParams)) { + if (key === 'inputMapping') { + // Deep merge inputMapping so LLM values fill in empty user fields + const llmInputMapping = llmArgs.inputMapping as Record | undefined + toolParams.inputMapping = deepMergeInputMapping(llmInputMapping, userValue) + } else { + // Normal override for other params + toolParams[key] = userValue + } + } + + // If LLM provided inputMapping but user didn't, ensure it's included + if (llmArgs.inputMapping && !filteredUserParams.inputMapping) { + 
toolParams.inputMapping = llmArgs.inputMapping } const executionParams = { diff --git a/apps/sim/tools/params.test.ts b/apps/sim/tools/params.test.ts index c026a4d2d2..87326fc329 100644 --- a/apps/sim/tools/params.test.ts +++ b/apps/sim/tools/params.test.ts @@ -296,6 +296,253 @@ describe('Tool Parameters Utils', () => { }) }) + describe('workflow_executor inputMapping handling', () => { + const mockWorkflowExecutorConfig = { + id: 'workflow_executor', + name: 'Workflow Executor', + description: 'Execute another workflow', + version: '1.0.0', + params: { + workflowId: { + type: 'string', + required: true, + visibility: 'user-or-llm' as ParameterVisibility, + description: 'The ID of the workflow to execute', + }, + inputMapping: { + type: 'object', + required: false, + visibility: 'user-or-llm' as ParameterVisibility, + description: 'Map inputs to the selected workflow', + }, + }, + request: { + url: 'https://api.example.com/workflows', + method: 'POST' as HttpMethod, + headers: () => ({}), + }, + } + + describe('createLLMToolSchema - inputMapping always included', () => { + it.concurrent( + 'should include inputMapping in schema even when user provides empty object', + async () => { + const userProvidedParams = { + workflowId: 'workflow-123', + inputMapping: '{}', + } + + const schema = await createLLMToolSchema(mockWorkflowExecutorConfig, userProvidedParams) + + expect(schema.properties).toHaveProperty('inputMapping') + expect(schema.properties.inputMapping.type).toBe('object') + } + ) + + it.concurrent( + 'should include inputMapping in schema even when user provides object with empty values', + async () => { + const userProvidedParams = { + workflowId: 'workflow-123', + inputMapping: '{"query": "", "limit": ""}', + } + + const schema = await createLLMToolSchema(mockWorkflowExecutorConfig, userProvidedParams) + + expect(schema.properties).toHaveProperty('inputMapping') + } + ) + + it.concurrent( + 'should include inputMapping when user has not provided it at all', 
+ async () => { + const userProvidedParams = { + workflowId: 'workflow-123', + } + + const schema = await createLLMToolSchema(mockWorkflowExecutorConfig, userProvidedParams) + + expect(schema.properties).toHaveProperty('inputMapping') + } + ) + + it.concurrent('should exclude workflowId from schema when user provides it', async () => { + const userProvidedParams = { + workflowId: 'workflow-123', + } + + const schema = await createLLMToolSchema(mockWorkflowExecutorConfig, userProvidedParams) + + expect(schema.properties).not.toHaveProperty('workflowId') + expect(schema.properties).toHaveProperty('inputMapping') + }) + }) + + describe('mergeToolParameters - inputMapping deep merge', () => { + it.concurrent('should deep merge inputMapping when user provides empty object', () => { + const userProvided = { + workflowId: 'workflow-123', + inputMapping: '{}', + } + const llmGenerated = { + inputMapping: { query: 'search term', limit: 10 }, + } + + const merged = mergeToolParameters(userProvided, llmGenerated) + + expect(merged.inputMapping).toEqual({ query: 'search term', limit: 10 }) + expect(merged.workflowId).toBe('workflow-123') + }) + + it.concurrent('should deep merge inputMapping when user provides partial values', () => { + const userProvided = { + workflowId: 'workflow-123', + inputMapping: '{"query": "", "customField": "user-value"}', + } + const llmGenerated = { + inputMapping: { query: 'llm-search', limit: 10 }, + } + + const merged = mergeToolParameters(userProvided, llmGenerated) + + expect(merged.inputMapping).toEqual({ + query: 'llm-search', + limit: 10, + customField: 'user-value', + }) + }) + + it.concurrent('should preserve user inputMapping values when they are non-empty', () => { + const userProvided = { + workflowId: 'workflow-123', + inputMapping: '{"query": "user-search", "limit": 5}', + } + const llmGenerated = { + inputMapping: { query: 'llm-search', limit: 10, extra: 'field' }, + } + + const merged = mergeToolParameters(userProvided, 
llmGenerated) + + expect(merged.inputMapping).toEqual({ + query: 'user-search', + limit: 5, + extra: 'field', + }) + }) + + it.concurrent('should handle inputMapping as object (not JSON string)', () => { + const userProvided = { + workflowId: 'workflow-123', + inputMapping: { query: '', customField: 'user-value' }, + } + const llmGenerated = { + inputMapping: { query: 'llm-search', limit: 10 }, + } + + const merged = mergeToolParameters(userProvided, llmGenerated) + + expect(merged.inputMapping).toEqual({ + query: 'llm-search', + limit: 10, + customField: 'user-value', + }) + }) + + it.concurrent('should use LLM inputMapping when user does not provide it', () => { + const userProvided = { + workflowId: 'workflow-123', + } + const llmGenerated = { + inputMapping: { query: 'llm-search', limit: 10 }, + } + + const merged = mergeToolParameters(userProvided, llmGenerated) + + expect(merged.inputMapping).toEqual({ query: 'llm-search', limit: 10 }) + }) + + it.concurrent('should use user inputMapping when LLM does not provide it', () => { + const userProvided = { + workflowId: 'workflow-123', + inputMapping: '{"query": "user-search"}', + } + const llmGenerated = {} + + const merged = mergeToolParameters(userProvided, llmGenerated) + + expect(merged.inputMapping).toEqual({ query: 'user-search' }) + }) + + it.concurrent('should handle invalid JSON in user inputMapping gracefully', () => { + const userProvided = { + workflowId: 'workflow-123', + inputMapping: 'not valid json {', + } + const llmGenerated = { + inputMapping: { query: 'llm-search' }, + } + + const merged = mergeToolParameters(userProvided, llmGenerated) + + expect(merged.inputMapping).toEqual({ query: 'llm-search' }) + }) + + it.concurrent( + 'should fill field when user typed something then removed it (field becomes empty string)', + () => { + const userProvided = { + workflowId: 'workflow-123', + inputMapping: '{"query": ""}', + } + const llmGenerated = { + inputMapping: { query: 'llm-generated-search' }, + } 
+ + const merged = mergeToolParameters(userProvided, llmGenerated) + + expect(merged.inputMapping).toEqual({ query: 'llm-generated-search' }) + } + ) + + it.concurrent('should not affect other parameters - normal override behavior', () => { + const userProvided = { + apiKey: 'user-key', + channel: '#general', + } + const llmGenerated = { + message: 'Hello world', + channel: '#random', + } + + const merged = mergeToolParameters(userProvided, llmGenerated) + + expect(merged.apiKey).toBe('user-key') + expect(merged.channel).toBe('#general') + expect(merged.message).toBe('Hello world') + }) + + it.concurrent('should preserve 0 and false as valid user values in inputMapping', () => { + const userProvided = { + workflowId: 'workflow-123', + inputMapping: '{"limit": 0, "enabled": false, "query": ""}', + } + const llmGenerated = { + inputMapping: { limit: 10, enabled: true, query: 'llm-search' }, + } + + const merged = mergeToolParameters(userProvided, llmGenerated) + + // 0 and false should be preserved (they're valid values) + // empty string should be filled by LLM + expect(merged.inputMapping).toEqual({ + limit: 0, + enabled: false, + query: 'llm-search', + }) + }) + }) + }) + describe('Type Interface Validation', () => { it.concurrent('should have properly typed ToolSchema', async () => { const schema: ToolSchema = await createLLMToolSchema(mockToolConfig, {}) @@ -304,7 +551,6 @@ describe('Tool Parameters Utils', () => { expect(typeof schema.properties).toBe('object') expect(Array.isArray(schema.required)).toBe(true) - // Verify properties have correct structure Object.values(schema.properties).forEach((prop) => { expect(prop).toHaveProperty('type') expect(prop).toHaveProperty('description') diff --git a/apps/sim/tools/params.ts b/apps/sim/tools/params.ts index 17f9392831..28f561ae08 100644 --- a/apps/sim/tools/params.ts +++ b/apps/sim/tools/params.ts @@ -395,31 +395,39 @@ export async function createLLMToolSchema( // Only include parameters that the LLM should/can 
provide for (const [paramId, param] of Object.entries(toolConfig.params)) { - const isUserProvided = - userProvidedParams[paramId] !== undefined && - userProvidedParams[paramId] !== null && - userProvidedParams[paramId] !== '' - - // Skip parameters that user has already provided - if (isUserProvided) { - continue - } + // Special handling for workflow_executor's inputMapping parameter + // Always include in LLM schema so LLM can provide dynamic input values + // even if user has configured empty/partial inputMapping in the UI + const isWorkflowInputMapping = + toolConfig.id === 'workflow_executor' && paramId === 'inputMapping' + + if (!isWorkflowInputMapping) { + const isUserProvided = + userProvidedParams[paramId] !== undefined && + userProvidedParams[paramId] !== null && + userProvidedParams[paramId] !== '' + + // Skip parameters that user has already provided + if (isUserProvided) { + continue + } - // Skip parameters that are user-only (never shown to LLM) - if (param.visibility === 'user-only') { - continue - } + // Skip parameters that are user-only (never shown to LLM) + if (param.visibility === 'user-only') { + continue + } - // Skip hidden parameters - if (param.visibility === 'hidden') { - continue + // Skip hidden parameters + if (param.visibility === 'hidden') { + continue + } } // Add parameter to LLM schema const propertySchema = buildParameterSchema(toolConfig.id, paramId, param) - // Special handling for workflow_executor's inputMapping parameter - if (toolConfig.id === 'workflow_executor' && paramId === 'inputMapping') { + // Apply dynamic schema enrichment for workflow_executor's inputMapping + if (isWorkflowInputMapping) { const workflowId = userProvidedParams.workflowId as string if (workflowId) { await applyDynamicSchemaForWorkflow(propertySchema, workflowId) @@ -571,10 +579,60 @@ export function createExecutionToolSchema(toolConfig: ToolConfig): ToolSchema { return schema } +/** + * Deep merges inputMapping objects, where LLM values fill in 
empty/missing user values. + * User-provided non-empty values take precedence. + */ +export function deepMergeInputMapping( + llmInputMapping: Record | undefined, + userInputMapping: Record | string | undefined +): Record { + // Parse user inputMapping if it's a JSON string + let parsedUserMapping: Record = {} + if (typeof userInputMapping === 'string') { + try { + const parsed = JSON.parse(userInputMapping) + if (typeof parsed === 'object' && parsed !== null && !Array.isArray(parsed)) { + parsedUserMapping = parsed + } + } catch { + // Invalid JSON, treat as empty + } + } else if ( + typeof userInputMapping === 'object' && + userInputMapping !== null && + !Array.isArray(userInputMapping) + ) { + parsedUserMapping = userInputMapping + } + + // If no LLM mapping, return user mapping (or empty) + if (!llmInputMapping || typeof llmInputMapping !== 'object') { + return parsedUserMapping + } + + // Deep merge: LLM values as base, user non-empty values override + // If user provides empty object {}, LLM values fill all fields (intentional) + const merged: Record = { ...llmInputMapping } + + for (const [key, userValue] of Object.entries(parsedUserMapping)) { + // Only override LLM value if user provided a non-empty value + // Note: Using strict inequality (!==) so 0 and false are correctly preserved + if (userValue !== undefined && userValue !== null && userValue !== '') { + merged[key] = userValue + } + } + + return merged +} + /** * Merges user-provided parameters with LLM-generated parameters. * User-provided parameters take precedence, but empty strings are skipped * so that LLM-generated values are used when user clears a field. + * + * Special handling for inputMapping: deep merges so LLM can fill in + * fields that user left empty in the UI. 
*/ export function mergeToolParameters( userProvidedParams: Record, @@ -589,11 +647,31 @@ export function mergeToolParameters( } } - // User-provided parameters take precedence (after filtering empty values) - return { - ...llmGeneratedParams, - ...filteredUserParams, + // Start with LLM params as base + const result: Record = { ...llmGeneratedParams } + + // Apply user params, with special handling for inputMapping + for (const [key, userValue] of Object.entries(filteredUserParams)) { + if (key === 'inputMapping') { + // Deep merge inputMapping so LLM values fill in empty user fields + const llmInputMapping = llmGeneratedParams.inputMapping as Record | undefined + const mergedInputMapping = deepMergeInputMapping( + llmInputMapping, + userValue as Record | string | undefined + ) + result.inputMapping = mergedInputMapping + } else { + // Normal override for other params + result[key] = userValue + } + } + + // If LLM provided inputMapping but user didn't, ensure it's included + if (llmGeneratedParams.inputMapping && !filteredUserParams.inputMapping) { + result.inputMapping = llmGeneratedParams.inputMapping } + + return result } /** From 38e827b61ae03b229b2991ea324068bbcd563f09 Mon Sep 17 00:00:00 2001 From: Vikhyath Mondreti Date: Fri, 9 Jan 2026 17:37:04 -0800 Subject: [PATCH 09/16] fix(docs): new router (#2755) * fix(docs): new router * update image --- apps/docs/content/docs/en/blocks/router.mdx | 55 ++++++++++++-------- apps/docs/public/static/blocks/router.png | Bin 34209 -> 10815 bytes 2 files changed, 34 insertions(+), 21 deletions(-) diff --git a/apps/docs/content/docs/en/blocks/router.mdx b/apps/docs/content/docs/en/blocks/router.mdx index e0f916fc83..44bac918e7 100644 --- a/apps/docs/content/docs/en/blocks/router.mdx +++ b/apps/docs/content/docs/en/blocks/router.mdx @@ -6,12 +6,12 @@ import { Callout } from 'fumadocs-ui/components/callout' import { Tab, Tabs } from 'fumadocs-ui/components/tabs' import { Image } from '@/components/ui/image' -The Router 
block uses AI to intelligently route workflows based on content analysis. Unlike Condition blocks that use simple rules, Routers understand context and intent. +The Router block uses AI to intelligently route workflows based on content analysis. Unlike Condition blocks that use simple rules, Routers understand context and intent. Each route you define creates a separate output port, allowing you to connect different paths to different downstream blocks.
    Router Block with Multiple Paths`**: Summary of the routing prompt -- **``**: Chosen destination block +- **``**: The context that was analyzed +- **``**: The ID of the selected route +- **``**: Details of the chosen destination block - **``**: Token usage statistics - **``**: Estimated routing cost - **``**: Model used for decision-making @@ -75,26 +78,36 @@ Your API key for the selected LLM provider. This is securely stored and used for ## Example Use Cases **Customer Support Triage** - Route tickets to specialized departments + ``` -Input (Ticket) → Router → Agent (Engineering) or Agent (Finance) +Input (Ticket) → Router + ├── [Sales Route] → Agent (Sales Team) + ├── [Technical Route] → Agent (Engineering) + └── [Billing Route] → Agent (Finance) ``` **Content Classification** - Classify and route user-generated content + ``` -Input (Feedback) → Router → Workflow (Product) or Workflow (Technical) +Input (Feedback) → Router + ├── [Product Feedback] → Workflow (Product Team) + └── [Bug Report] → Workflow (Technical Team) ``` **Lead Qualification** - Route leads based on qualification criteria + ``` -Input (Lead) → Router → Agent (Enterprise Sales) or Workflow (Self-serve) +Input (Lead) → Router + ├── [Enterprise] → Agent (Enterprise Sales) + └── [Self-serve] → Workflow (Automated Onboarding) ``` - ## Best Practices -- **Provide clear target descriptions**: Help the Router understand when to select each destination with specific, detailed descriptions -- **Use specific routing criteria**: Define clear conditions and examples for each path to improve accuracy -- **Implement fallback paths**: Connect a default destination for when no specific path is appropriate -- **Test with diverse inputs**: Ensure the Router handles various input types, edge cases, and unexpected content -- **Monitor routing performance**: Review routing decisions regularly and refine criteria based on actual usage patterns -- **Choose appropriate models**: Use models with strong reasoning 
capabilities for complex routing decisions +- **Write clear route descriptions**: Each route description should clearly explain when that route should be selected. Be specific about the criteria. +- **Make routes mutually exclusive**: When possible, ensure route descriptions don't overlap to prevent ambiguous routing decisions. +- **Include an error/fallback route**: Add a catch-all route for unexpected inputs that don't match other routes. +- **Use descriptive route titles**: Route titles appear in the workflow canvas, so make them meaningful for readability. +- **Test with diverse inputs**: Ensure the Router handles various input types, edge cases, and unexpected content. +- **Monitor routing performance**: Review routing decisions regularly and refine route descriptions based on actual usage patterns. +- **Choose appropriate models**: Use models with strong reasoning capabilities for complex routing decisions. diff --git a/apps/docs/public/static/blocks/router.png b/apps/docs/public/static/blocks/router.png index 30942f2988e1f9016ab4984add45f4ebf688a018..c66f3039e7638fd93142c93f61d9947a83d161da 100644 GIT binary patch literal 10815 zcmeHtbySpJ+wOp)0*XVZC@9^jq;yIs-65dT!q72v2o8v}fP|!kAfQM{DT>_8X7Vr#`bxaX@*Zq z`cKjz5C|Rwe>r<{LK*&hS5<`C`P6ofLe;TL*GGS6B$D3me0HL4t;^}N{+(?i%xGZ{ zX(>L%c^>@|LD3?vVEV0e{bG1p#gpWczT1 zjU(KyQ$uAZ48O+%`)pNu;?s4xm~7oU7mG^8VO(l2Hbr0+=U5ksXX_r{m^b^87|Gq2 zp2ldFoVocT9IUmjg}#!diVBP!JQKk1&^9nQc)|leO7Mfh&c(ifT?Bv0z)vm%|3ALs zMP;1(&u3DoqO_K*k`nla?;B&K9(S+;_O| z&`S`}($b1Kn_G%#%E|w$IVg$KTf4eCitzAwczAGo@NqjhTk+f#78d5Y!^^|V%LQt1 zxp>;UK0$HWyDU_`hD`$!Oc~io*o+LKmYx;(*kAl?~&|X z{xvNyK_2J}&t2|2Jpb7@XetK16;ZcAS=brK*&xC4fH5R^h4{qI>i@Sd{~qxlE%pE1 zl22IppDq9K<-fMnaj|fgbwGk4T_yg#GyiJ*&yW9VD8>WL{2wdvcbU)Lf_0W4665*L zJ(D2Hq|UevgE1Q`$w_OY@UU+QqqPU>PW}1LUA|-2#Ez+d&KX1?11BVU3Riyc>1tM( z#=ZgvkGI9Awz>J1#KdEf_u2iWpUhts8H2Lr@nUP0Vk!vGjq4){!b4Rxkp;U zDX^;}pLKP06EyE>UjW;X!Av+eKVL=l#CYuG%a^IB)O${i#*5ENZ-=dNl&GW#)>qF6 zT6ce?6tZ8KGyOVW8&Kv1xWX+_tg0)Nwta>>T7DB80y!4W#ku 
zrF&SYf*dT3JagHFV+bIJat;fC%Vp7-I#j?-L(!JnKTa0e94o$N--bRudl^vdUwyqT zDUJ9W#f(OYiMUYqa$7vd$k$hbkW=M5;Zu5A2Spd7M@W3=sBs?V;ct1UL?WP%RZ3Dr z((=?w+rHA$cv{YJzwYZ2NYTE5^W!iH=X(q!aot%r&tHdK*h&w~4g!wN@XMGyjwAkX zygsSGdjM_n+RAfiP_S_C{SBR(`d(ZtlimkAnsD;M&gdYLfPi&ExQQ69dg#-!=(3T? zRO4%R-H}aA4kP!U4;It8X|Cs#J!5|k9itsMJ{R?Y$5n%q3F6*fLQfSOmxD~q{Di)B z4xg&w$l9v@m$-!IIhxS2#}k-`pseTL4LZqP8Yo|%pBngzYo)kf$eZKjS!8Q);*qSs zaFh#1C-3Cl z=RI0$S?eM2fG11ITN&B38R#@>7~?42`du+ATCF${c^dHnSEFGX2o7AyG5IL%Ax0gh z*E7zhawQ#{Q(Y_RhIu~mtxEJzh0A21!n@foTcf!rYyOIxQb(4H3NwO2C9>a(0-VMa zFF;dyPfJo;X#A(}QL9GS@}SWaam%q$p2BK-AY%C_qd83QO6L#{30bt zL!lIOTPo$r4lmpDeth{Tok@vPm)aCbhVY#f-5s_HV;l8aDXQd@l+^Uu#wBtYRSbWR zph6CQFc3a4e^PhZ>g4z!aywrmTT?Fe8L+aDF4|>=ZYvH>o3zpY(`v$6cAimWiQv<* zq!;mg0N%Or#oQG8(slpSBVA3+*hpr%A}y_a^p)Goy_24cZ(oUJ;OdXoT)MWVU zLKO+jgQZl%(A>vqj@JBi8u{#~gHzrOf0^6@ltn=SPfXcjy8on!|DPT8LDjb+BV3i^ zPt95*%ZrU`M*H7uVG|pUEm*}Ts8o{qKe43y^kOX=g9+V__cvYFdk0EL<P2EyQEERg;DWC5p>8s(0x>5F3<^0yALEo|bqrPn!6=Hk?3bZos3aPx!p zzTPVnHI>F-@XWI0;op;W-YToiPiZPAy$XqGB*f^`2!P{@63o3uF)q&%tW$=)lAsKE zXLPS|&&bmVpMKN2FE_$FW%yugQP!Z^ZN)Nx$V$sfli^_bYsXhA4cqG z`RDZGIhNvakNU0KdlLKVO!HI?2b~U&4wCq+Mh|yZut`>FcjGr39sG}s`Yu(uuTLT+ zPY=54tgSRt?m10OMGwGEJ|E=`sQi1*^pJoo4dOc>wZIXu6o@ZoZ{r_Wghs7!Q1&a z>R1eZ9!1_3t#+QIN$R}1?RSkwY`a@FH>z>E361^TlkGm?HYDuQ7fQ;oN;S3r!uX5h zx137*iBYWSdS=6Hr0974Ym&~X&t5xLe4~t^p=enQ4S_jvU@94+YxSK*aPBW=c7kM^ zoaoT^E6==jDeQ#5*LBx=y0E~XUqrZTl+@y0CYp)PL;1-$XO6DC@~Y zN%{WxaHyfe!EY-?*Ra6q2MhIf{harD-4J2uvN~!mMqzdCM;u+AXM^9LR7b^IeusVO zqc5*x+Bebkt{{MHV9$l&EIOQ?#XMbwkpxc8Qz|b2(?J+GHPCu_jMY{V5bU5dh;%J%ve-_ zzLy}o6vZ0VVbY$!)yIs7PqIN5m`Wa{ra+TU6u?5Xd8l$AmRDtIlA|RB_G4jf&4TjT zK!MJ0o2Kg7bRh>544PIeN}iesM}cabzTFO-)UGQ{{HIl>=pXZzrb3p%%H4J0J^^y& z`ZpWhWj0!SS_~|{jFu+RH;Jb=tT%}9{K+uFn*{5*a-F(qRSz3u@_z-dvm{{~j^}v< zy?5>Oo52#Wq_^Pspf zL;Cr+$39Kb!>XgT8irl+G&GVx>T3hs`PtkwVSqg8bXUOx^FL@IO{G1eE&aUSWC7dM zDCsL3jg2xwMoN*Jz%NFUUhx+$wNRM4W(Qv9GQ5W_lPAM@gB3k>99SEx>^h)kV24r8D9wvM-F!oYt 
z->csfuRM`G(i_vh2v(Wk3Ut6q#u#B_Bw_Mj2-_b+J`6DC#gCWC9DwV_tf3R4_)w;n#wu2pnXnEp>!+{dJ(J~v-naonM7pBKE z6!sH}fT3g-*mG=Js^RxJ#sx%`Z6ER{Yj~HR78zBtQ$fpfBd`|7mSB-2!aQ+bucA)R zKD>jW+9lz+E=3NCZvA_;?uI{QYsd8yZDX~2WMnVi2>2zaSM0HcfX5WsZ?pRUlK@&wr zh}j_D>utfBca2M$9Df>EKK!a_neRGMEifaLOKW**iVAVZxRzgOL0LJXw~~bV3o}L^8||7-~q51(g4*t5~$D*;If-Vr2BNnjvGgo(tQ zjSGhH`P^8WpO4wrl?EgW3w*>Bot`efcT`V`c*_}9YbcBdTk2V9h-|#3syR^G?tut%)>!HsRBt~DHZ$EIk=+7Kn^zUKi9u0x!=SmHFa)6XOS-J@LW8^lGvJ*( z&kqHf0jFSYCo6o0m;WCOgSokB-JKEWwl+RuSZ4iXWLqZwRXX0;xKaWH`wJH81NQ{= z%zu3i6m(zJx9xqWl>ZWn&44Dw#_O4Ua#{QtGBfgbPn!wgx?=p*`)zvlPwTw*DnQgV zVp~#YdGX4vk6t^=22s@Fs|zU(?hG-Ij{5feB6+q>#3Qe*A&Q3m`$nIhhF~Y_YA6Mo zd3u4=qXO{WnIhEnGhLFV0VWKs-6N*JugOI(5HxGSWw@^;s%_*s!|B#U8K--x9pntH)sK&Ry;M%Zv;9RCUJ~*Ttfk7{b zsZP8N_Z;KnZQ&Y|@*WDthK9-N$m++EKp<5B4VNP7Vc*Z2r~0;3K7!IzubV&_EY&TaKY!l7#{cxB_-$(x4PqcijO^1w zXG+bNT$L2V6hWlj`ea?EU^#MZ(0E_RWT~K!N}P3lXUNonMK$$K(r9hQuCd328_?@B z^6H~%8X7U}F4*#+(fRg7l~x6?)Pq^-2=f?sLX*RDmS&25I+F!lo-%3N60%R7aOo1)1wnI-^Zd&l z?AQ=?ePg<*cp{P8*crRjo%y`Tq;3>zg!J0(QFx149k1$JtKCwKv-tqR{IE>8!n{Qm z190|l^wBCaq>7^Gj(bEpgy>#3rz?o+;dTXjNlXHl(W6*-ajiVnayG5p;XXAjRlBG8M!8!<%#e@QhxS>zVws1X8BFZ z7Glv)GemTBUh@S523|VuKu@ zgJ{D*qL_}Bzd$riafpIbfrLToPs%V&?@i1`FZdr!M?S2vb_% z@a4}6yKX*%3$lFqgE2Se=h1q@I=toKEC8;Ji+F$3+TkX`oR7YAbDPxa3yZAyU(^&6 z^KK9+WZ$$NDb3ls4^k(s6WGE^_LS0EYB7-b@MNImDgN-GV@!G3NP;9pP2%8PBc`cP zF6$jXPjI@vb1XS9_>{E!^`W&;c&U5vY1Gj?2A_>Oxs&m8qz}S&sJL98;-F@&x}AS) z=?6=?s}^d&=Fr%40xi!-mW&*$$a!Zf!0?bI{MPO?!_5+BEKhAGF5-9Pt?BgB*#$n-|bOm3zG)od%{&$#b#iu zXil_yMoPF+!1hPYS6jTyHC&DB(#+rkYoa2%*JQLR1AmUDPS*~lWiq_#LeSH1v&-oW zhcYq=OJS9$ zrs^p?rX#b1rIlUtDi1`HgW<5)^ik90s4r-ByuHO=E6e%pYDrz>r zQ%$rOQY`wPY^6it<(UWKq61u4hkJ~NcQ6Ry29QTsp#C$R6dOy57~tT01DD(Rp;Z5D zfZa+rP6hY7%S6P zKZ?$8PRae?4rOiqNiocu7!IL0?XDFg*$y`fE-lslHth1M;SOfH=zZm+Vs?R7zY-ZC z;RE)d-I8l_1!@c-8yUVfa#?1VO@C0QvBV%FUTa4wI|5FpMZsNX^I5Ee)$)JzYh37k zLQR~z)=^ipXT20Z>l&yQ@0f3ol^ncpeDoj}89%oCCxP|*C^`{+c5Fc=W=To)eo;Rj 
z*hd(CAV}cCtG$KqL4_r{0Mt7)N4X5(QvxXA+da|#4WyMw6cL~x{^@)0*MQ2ATBa$5}%)?z^FmN>x(7_ zMBq|@!0{*QZry?k+aP7#+Z1p3H#G&s&F{7OUl0PkLxaRIb#Xrts-B)l+6s-&=EYyh-4;peSPbLgQdFKyvp6=`Ibq@0;!A_1n}O)n8ZY@ z2r8jSDORHl2|omYF`?VDfTfT9Sj%VtsIUbBo_QUV5uDX*y4c0O|W|Huc7qvynz!r19wng2MvP@OwLYH)x~tuLnwn zD{#^m>K2s(hBT~k$J&8WGwq5D%S+cLYK)wmJ^}eLR$)IuIjwg?!smSaolJdWzEbp#10?-ZXi5lR5&kM@C2Tc%wgrql<(h6IdSVbw(~ z>i8?*IkzNc_>&p3C+mKN`42LWJ<~#6uD;tR@08-~7CMsb`=hN9KWH+eZjAvbFSqH- z4S8804m6J-VOX$C_LUGofn(WG63?JiTpfU;%wxMVN0i48ypBi;7mL%l=W`&v@bj&0 zPEHQDVIz=@szO;CxF6%&y$^Y+!Vf<_X^I8GZlv`Tua(Wt;#QX=4cV3XxFltUR4V%~ zc$=XYBEdnXEhK;&ip_QDSK7ZlSFEA1GFbFB`_V!c5TRA5qGr3vTF(#z0~3UCm2*o{ ztk>Qe_P+5>rLry&IkR$Tej%{i-uI=lv9x{^nNxtA6fK$%lee7ovFR@0$~)MuOrj~% zf{Jro)E5!dWl#){&!I9E-yq%RHege&-d=lch3+ZNUlGN4?-NdYSUQ!)@K@eP(CHiD z>;tiUlOqnOw__aW4Z9~A zH)mTpTZREmUJYO zOTzJ$Nd{&9IHk|o?2=)c4fD!F)-9LN2%1O|$_VBxVg-Sqe7%x1su9$DxdUPFF>8F& zIfU3_)1%QL_PLT&G@w}+yfjcs^(8uWmdNmZ(H;L2A0gx((1jJ5vgqn^3d7qHz(+-W z56ea&@%s&7+(~c3$rH(wfol$;`$gn@}{By6bzh2$svm5{7WM8BJ z1TLS}51ve&-_t*BRTkqVB|^^7eIg@}e|m`w1$|8f)G%g{r_%J4l`N5z9&1h^~PE&TX zLa<6Z1PHIn?}Fno)IotX2&XUbzdSp*Zh*>))A4ReLLEE<^!0x~4D~C0V(Y6Q!%rK{ zhQHJ1QCsh__@>6x_(Xf+l-0jrZ8x04Y(eQhK0}nY`nxL-KpMzAemqgVyY=6QtGn`J z_wr)i9*$b^vyO>Lnt=WIm$pd}@7)i^%3B|c+b%?(t!)Gx*Q;MyaOE8eTt&1}`yX$z z_jD%lwdxi>GO(Z~gV&ycSFikC3QknEM7>HV*P=ZZA4<#)eRISdO7Ju;i9cKhrx>DfA3ZfgaF zy#T|<0HYX}|ASh>$Ew{5-wM0rHgcN{7}JqULfpF_E%h_J6)1h=(Jpo{SA~iN?!C9> zHZhzjdyxucrw59S$G*S1IUD>006aIcKVO4;4Wz?!dm`krGBUneb|%lmzl$QC4q$W6 z#Ov1*h$b22UxkXSEQ1VX8HZkR@S)w;0K5vzPD=0P_j&Y|k0;#6kh`m&yGARJV~A?5zhF)M+%Rwv+FyJSpzqvodO=LS91DWT zF@SRPxcY;IlxxjWXRvNmi*jOJ2car9C()!7A_jo%>~ng#`fh0U$IvBdKo^AKAk4EY zsoOOi=RN?4mqmmoDmJMT(XRjKGp(|<(3$=7SFfV{0?&bbqpXe_deIDoHURh*CgMle%166i5VC7>(gu?C34-EP4Wxuc@SqPcPtOIT2`xS>b)n2 zWymqXXKbBcMf{FY%1&X1;I0eoummHx=O3}*RoPkUYcUwX_V;?2g~U1? 
zVi_#BLwh-j2Db_yWzkpes}!<3g9L>A+~!6TVyrjk;RGcDy9mU7WnZ3Jje)*SJPR=} zSBeChXc4>?-)1ClV)60z1tOlQKAgkUb7Cz7Lm;atpf!%O>f z4?_jx1>=xf1z_i}eWssun)70LI#3vJJsfY74%eJyvQx*$6>&Flg5otR9)`q4_dqnvA zMb(q07rCV9NtV}K--Kr99s@yYKb-4fef9cv%S*DOXZ5(?+b5ATGf(##tL(i0JTI}6 zN-p)@-*~cz&m18Iq`Ealo1W%!O5)v|`ElqnJmUo+D$NZV4M89bUqJS&-j%In=|+>Y5CCLO zn;EF-djA4s5Qt9#gTYnm8T`RELlr_@{}w>Jcf=DVz3^8;lJ(M)kF$~?^-95zF z^M3lqcb)U+xvt-J&i4l}?Af#Snzh$@p8L7)d+iWqMOh+zYJ4;_G$Oed&s5RSFw4=< zFgS5>fFlNxT$R{GQzis@L-v13>h#0KoA zwq0#4iORkCU=lU3nrS{;3B*p%kyGyVYlOm0&687wc+Yy z6R~)@zoCZc8^1=Ei8Ax?*`Yi&F*l{@az!%k-0d<_XJ3=s^dBGanJC=r>D|jF`x}b! z`oBS!VnSP0#HzT|4BbRmbph~8X-eF?HO4jw&3f@6Hyub_ z8(IKR#Kd_YCTQn>#w5NcP?h-?=0d|r(opdfGwzDXH!ppR9|LaekL*@wSTXIxFlvv0 z*L&TnW6<~;IYr?qJBry%Rk2rnedb?^_eM>-f|N%xkaVs))})>JFVN?ILZIv}QH08@NB7DW!=;Xwj`ox?-e^adcc9VVSy(Cw$QvrW{4XL}Ss!f?vRH%lO@mJm| zWB8;Ie?olt4Q~C>@meNXh|S(osHTPkqha0R^tzc#N~+t;>mR+Ir`Xf6I)hIygaN1U zL4>oOU=XV+r~Typ{tM>zea1+MbbeQ&Zjl2Z($_jBg5+}qLs}EBT2_Ap;l@1z$|2&0 zAKF_0YMvZz)*vUfYpKI0kJWtrpPTO=L7w`h>990{W9H>nytFmbr%r_tvb}Ex6686L zevfoBmw~d_<%-Qd2XK=s*}c^NKyMIcd-cZT^{<;Os@LcUi{M&N-A$FY&I@ht5g;w* zvSy^p`3s`{-!u_8@y~t~-rl>+XZ!RcM$BAUyUUPA?F#QKYaAN2`&SeAtbGqKjH-=< zrsc7Nqzo=|`(Q0qeTTg~X{HhDqroz#giSK|`3_pRbzDj}r$>otr}IKjc0<(&*q7LU zj|Xrcoq_=PY#Ni{$rFZ=F#=)Q8--0NG*=BTByL=d#&B=1Xf&5?>f_CquQwI`wVr?q zf%x890{7b|wzqbnH6v-L`AF34Ee};;>$xlA?a|u@lX_l=T{lCMt;frwP~)H)OpCn} z`)f+X$Qc|gb*5F0_N6Nr4{*5Wb&R`L-VH`-s{5;0_L9o@zTGJ`De|c|*}%)FOkuno zrX;a=-iJ(rf7yI0@_Uf)DyMc~5Z`^hJixd6l#9ENCG31be3-zDEFopNEx2Dlk?V+^ zS1@5i?XQWCjxGAMaiu+3CF1LJ=(s6VqDTC&CvCVtEJq=MRL|Ym1KR(GVg~490@Sus0L+JOtr5=JwF@%eQHA8{+AC(@C; z>RrZR;vQ7s`+$A#4Sg7mkd4%)`$~c-PMHoHLHLZr3AY7oTw619=D91$Ppg&QnI966QofJ191zVfU|1O z4|C#ef4uc8S?sd@eA9&ew?plsEXt9{<&0m0{Yu*)#eD@2tCATThXI(lw*}&;Vj7bi zd#T#O9=S65^B|pf;x^VI5wyAG0)NeMV^aED4y1@-LTyjCHn8AqHP5W#v2X_1jpIvW ztxbly29al+g2g8N%?XGmE6brb*1zy9~BB$Iz zc2__eQUtVi2?jO4 zrc$=E{O>X_MmKdT<2Xhaap{~Sg%ImPem-0HDfa%gss5vU!S+^2<|X$5u%n@wjDx4#w1^-h~?U 
zl%S4vzW!cUCEs5Iiq9Gad?jW|&};gbEk&VYjES-sH_7)8vTv1pW<`fe&`Ef~^C@(l z`itABE$?=Z9{tD$C*Di(y*}UDf+2!@uMeSb2QuY8#5uQZ;Mv*!bZtORzfCZX?WCo6 zW8j~E3H7;H--~OwWYY0I9-d#0(=VKfNH7BCerh+?{iVEa{Mu(Vmde`cKe^eGAMMRH zIvOC=-)XL-nI9NH7)aP%TjR9sTgjW>20u{u@_*O+NG4sW*Gt-I36vy`UwMu5M9Q<5JQF^ha%_(0DY(LdOWs`0$EYddLC;!Per801CU|If zKK`1sgo>%2^`&HTGZ+lWP@9?WY~T+NgC;vytYb*7C18lC4qP!J!r zJ`nh}JEv<|_`~GvCq;b}wvaPL;odXIf=yY!Ou2;cO1#leD$^tt&*?`>#3D;kR2W(p zJ$iOc*z+l#JDm_puF?*Mx5vX8Bs4ZvZ=usEMiC-E_uJ{{K-c?`rwKa*zoOlm9*~=M zI6O~l!!h`8_1;Rbf3^VRSc7*9H_IH>hCLVK#Ed9p;*o7K`}Jrv42?K%jo_CqeT>zS~u_o?9jJmikC7QISm}OXbPn7K;DmI;R4u zSr>2J?zspyZaaT3B5N^BEsVu$i?{9#YJplWmybvFNpey=HVCnwHS{RH;=<9CTffCs3H;YvOAgxJAoFiLbkyMR^*<#Sb#m4YJ zTYuDIZSuv7aBEQA;)C4|rEf@&v=8^X0-i(iN%?ti50WW~f4-V_TMVC%^u084(Vwa& zU?Gz{_#FA(wf-=<{L$&GuW!}yh_+=BjYL`=KcjH_GkwFII{9?><6z0lc_|_exPzz1 z#f(_I$>FUJW?PXdfsFDv*G4ymz{nQR0SVkkv#9*yFP7b5bRd+y%>BV zy$yVj+$7m~WWoUfLRxX;>X>JJ{b^IPt?tQn^Khn%HQA?=DUP4eVpQ9<&aUN->=B-f zW-Pty)$wRiLC{3Mt=A&Wd@F^1KDb+Q9EfWe6c~o0`z>U!)Yv<@<1!QNUsXavGd=vx z`umd)Gjvz=DaPv$f5Tr>piXcx`BAOG$SO4_@(O)bxxf|c2fj5##qKbKx?Ltu#ZJT4 z2XaMvSxR(aeL}%Ls@?uViLo(L|8ZsYqc4@9Z%6H49vkvA1a5Qg=S#ZRLK*kIS9*Tnrbw7~*X^$2~IBtGzmz z3`P|iikw%B;8-QetE1wBF1FK7Hb(rLWiDSl6404JDzo$Xa@_79)Op5z6>by$ly8yY z)GMnZE)+XZ&MKICR-FvoFbH`Yn3)4%ATwaVe^g<{HCf2c7;}J@IA>Jsk*hh_Jok1uqS@FkPR6?c`#F7-uHf3C~CJYF#N9 z_=a3P{DTVPqHf|xeNXvG>QzdxalHD6>(Z?oV=~G`yzLz;pW`?ap(vQ^?0$@5T{oJmcrNzBJ zsMUx{n2ElunC0HxdL@pBzE1Ey1&z>W|@@e zIgXc)Xt7xpH{p1XbQbs>pj(SJ*PYHJ4sX%=Uhex(!lQEgFNbnK0-5EJ=CnA6UZc9M zM2LT+kciIfR@7&qDKtvu2Ce>M8<&5Ua(*>e|QS4=%(> zG{7EEBIJ`_5aA`;-Yl$YzyuNYNcjOe#^m+RqsiX7Km0(J=am-+6^_%!Iuzb!cy>Tj zrPbrw*5$(&3cf)4gH4Qq$Tn-MRv?xbaKFKpZRd^fs-#}EpjG}jMKXBAAanb==e~$Q zMdg%(yjOyRgJlZCdTJmsb%kFnbNjiL*})4q$JKlA?@CDOZgIBY%d=c58Z2C^^9&`9 zkjbl-iD9cJi8sz_H@Xmf$&)c7BJ3ZlmXN~gjc+|hB5@_>Om?O>85mxoS))zir^<##itvTb2MS1(+#3!c-^S)d=wTHbP(UYR#)Q3f0 z%WVFW9Lr?VNC2{>K({P^=Whofe{)KrOL?|k$w9rJ_x?a$eesay0>R_*K*SlzkwOjJ6iKMh2g47QFU2jUA 
zqUMx-LQOk4#YgnRFn{ZE8^&jFOBkeqqjHM3Z%V)#M`5lS$j3N1PhSpB&YVeiO;I+0ZbA8Tn_J`TT#+B#!u%wfedhz_xW8^hMJ-XQ!M}) z%F9<|iB^a#v_hF>G!-$mVm$%S@`)3~a9DS?5XxtDdU=Q-InG9K8ztiv)gHV|&THl~ zA2*JJV`$lQ#$0r1#9{u7uc`kphh(LgO|5Ttp1QrQ6Kwx%ar z$nC<^tQhAVN>UA#o1KWKTt8n)w2k0m+poI5*fh(}_D|Kni8t&!?_L9@jMj|^%YqQp$(p@ND z@Oq}uJ+t^gt{(q3MTTrd#}N*)z4K7=po7snds)9nY`6AYj;~hFdH6MQh!?@#OwY_+ z?dSu|+s{{BNNHqAvinIW6J~SSspK=@SMld_PCPv**_CIdlKH@eP0rP7{v&NxV{B8m2+E zOM16k=$waC-U8u<;IM~JX=ggkSI!zaA551r%_yuNeTAI80dapi1V9$g5tTi`uVvnM z2PkD9sD$mbydXFlZ4ek)s9=QVe{a|>Hxe_}x>Y$PM}}? z=SzJYiy#2S-GBx+>I|Tg7yXezsEIxwL}P%&>a_4u3xBLY8tGq7RR>F%a<137bt zkny48&E!)Oi86%fA^}8*&TY>BEpXze=u*!OR8i%hx;Nvw3&;8PXQG`P$ix&LzPGlD z7bxz1)=XCjZ>>N@d-faGDSKJ<46mE3)8KBdhFU?yXIWaR8DLtT{y6UcBltck=%geP z`#_;Vdk4Lz&KHG9i-EvD9J`Ds&}W+LtPVAQdK};w)`d&&#nJh z1p1q((Ex<0YGc!0d4}7T%0?~&^wAR&{JG(WWs9FD3^3NvbL0kbYS-+zI<}p0dk0qP zQ89dkj=!MwVc)nTLISo@K)_L^%#p~i!Q8P}q+&S76d-LOvdJ`3H>&6PAzBF}(gQ&M za!p)up=qbVts=pFUnN`@ps-n{!gyEPGoIUVn@ywf2c2zQi-fmkM14DrRk5_&XwA%50WrnZ)(>~0&11bz#!V~l+U0Ei zor08^{ZrN}>()FmJ9w9SYn?J18A90yR-bv(i^(|-rBVLjJS1yEkWrZcTV@xiPFAW; zFDtSZ_0CLDsyX?}eVk7rV{zDAtqUO-bV``KygfzQGms?WOOUM2*ZQJfo~zC&9!Oi5 z@HjlmALMX@QJO<(z4aS;-!}o87#lLu);i1})#y4P!J0O4pkGP?Y{TBspi+CK_p3kG zZR$3L6EtnI$AC1Dxqk17YZ8>Otxj_|H z=p5(CF2~((HxT+ud4JUwlw|;&mx`y`gobV9BQ7OOv8n7YO_&lxeETag?Q<#ZE|ccK zu~A|UZ&&%>cT_nw0Ep}4?EElsCyY9!I+5J3IsV(J>imQRBrf{Fe}vlnhMNFe3qd&_hsBIpXOXSJH)$;xBNuD`byp;7s)E+?Y|231>GKk+@<{JS9wJB5zI9PZ5_!PY@QcGS6W~SUp8fP+m?0H}9m&Us}yR#;Z4FTISN5b#l?n9{|+l zS{o@Q?1Ui!l*MZi$T-5}G)r1^{}QA2p#BV5Ku>Bvea}P8=7#-`gltUFlc&2SMD&iH;nqkRLcE{N7l6oM&C3!55A%AsN3?Uht=;0~LZXw;e(Mp0>>#2GEK5d#;`;jgSe}`3#H#{8|vl z(ah<^QG+^#lg~etkh^Aaewfe8fs6f3Z~ysOh;q)dK zTsNa#7uIUxe|+v6Ae3oe@j8+GJxr(tSO{29wkOhE+kXpyf39Qy4M3YyZy=n;{6`A_ zeQgQo2Q?LI)Bkp@rCtClewgd&>3?qf??|ww1uT(54iDFVyU}v%0g>ytyHDb;n&QtM zRikWnmZ38b{_QgRKd9S{@f*7Vo_*mMDB7A*!ox{uryBW}{2HS=_kltWH=0F>(|D%m zl_@ZQE-vRTTQkQeCqV$Xa^AZBVe~slNeyEOOY%M9CItqLF>Oci7DwOUAlVE+KuQ<2 
zlmGL!N|ggc{$1BcH_;PP8Ai1@KUsduPwY1>l|>fMYy5Kk;9XGl(?Wm~nfc)nT zPGtl>E>`+x`q94+i=i0$|DFA>q59vL{2ww;+MJg3ey9O!MG6mV1rSH~W=`vc@{51s z&wtg>!}Gmdq-&l9%6HPG9(Gpc2mVa4Zy0)byV*5^ZVuuaUa>5$lod7tYBCv6=lgHf z$$X@C27XvA5L!Fp{6$zlH;!oAK0uX|K8{B~dlGe z5;>5x%?AMOE{7r=f}L%JCd@MSP!#>-ibnmxS38tm<$-)!(kpG+1q6u3VgSt%vsKct z)ieTm0?b_}(CAL)kaP$;roT8?Jr^gO1YJPi;LXC#h5V z09GH`i!tF*Gd?(|W?MRVH#e;D=4$rl0}#Z76;i~1@VX0T*y350539cgn9d$%c7VKl zTTnhy%09cWp8(jRUjgwt9ow`bSkF&uY*gR3ZYK!P%Obcp^I;=1842sFk~imjFz6Ng zn0R3u??WYA{Pu8~xB}0Op}c0Trzx@S7vc>W_X&8eE>|TH5uxq0p6s^?-(=7kd*!F2 zHEp4^ug4~J$|7a1=zFt>_oUEgoQ14~r5W(GK^{$;rjA<5tnfEr{p{2Nc z22cg$D2_PDk*s6jzB3@3knq9z)#%4A0J4;PVpukJKB(rB$YQ{}RHk~rji#9?PKVcu zVHP=aT1p0(l4kajcfE3UTsmi;5OBoQoerrfeP||;6z7&)hgcSN(q83Akz=pXT*GaB z_`;|Lf8-Er+x##-8>y`SN{*laYN5Tv*~{!t&xiRDsKTg}fQ(=A8dXL)XPH;~*`g2} z`R%nS{cQZX#8wLe?y6Q)U4*NXo3OYcq*14#?|r-nNJAA9Vj2rmYSFx^>nNR(Q-^=j zXA9XbuAyW9oFmn`LZN1M(XbmBUW7~0$gy)mqb%s)^InLO7egQK@`GJB*^|+AtP=7L zyPZ{_EFlGW>>GY7s-d0QKKylg{R`C^jm--n^7-E(UlR-H{Akh|&ZZ3ZM`6$@ltS3U z)o^)Gz6vsLKw}v*Ry-;XU+-jrhA9PKevaPfs2F3QaawK%=6{qeg2<^JO~9QF(2Re+ zWGfgVN9SOAxd)nBH=;wn5#+t_zeP)VPsgrlex`O(4FCtCPXH_w!7Afwn2Ak^&7Tdh z1Z<$~)6bA;l*~5%`U0qp3)V0BT~{jZanm!zeW?-)qdB zv#jikovulOap}Z#oSsMCeEQHAE>UkMyNlV9@Ui^rc?_bN)GhP$t0Lbkwwhs;z;S%= z3T2xgg4Eq2_T~v&62NjVF5qo0I`&7jti~Zk8BLTDc#WmLC)w7YV{E#wr3K>*3nlK5 z0wV02le(t`x*08}6Xm0NldINUHp8zqw)qMkaGlw_-K&$Obw@ zt>ZX~dZV)>JMljJjRez_N8!@G?yFbn&;o`{2U_gAW?*263UIBerxr?i7`_OnsMCqhmhoG(+CZWuhtdg%wj655?v{ie*5n21r44~2O`FqS`mkCRy<>r z@a~(vV9EEWQTl*$KF{zIkkvmI1kZ6mN_?lp5_W?CrT8QJ$N*rYxOXN`c3uUfxbsUR@u?4=SeN;L$MRr`A6&j*70GWvkIn!f$22coy zh+Q|U_R~kr08PhV-$N_1a*#0<`9Mq%nKXF(-nxA;7|{U;Q8YVMixGAkc{zgYIjAr+ zjvaI6dAniPZZ5tWAZhXGy1jWP=dh>Aa3OLryOj-B+trR0n`7T=4yK-Uh1!Ut91Bos z%5&WXx5bgCZiM{%$&G?l^p!Ino5wZO7Kp_$qA+OVYK97Rhw1nJ>LdW}Vp4GDKB!8T zf2>TFs`N@B=|#I=tT`6;(2Fm^pQr4ai7e5T1xtD*G2|5Ai<}q5<#K5HUL7}!ZswC! 
zeZ4I$UH8o3ekB}_W^?&rq8}f$gQfEO{QF?Cmx}cwa*OTB(G~n9SL7NxJh##823qAb zLCckJ5=sB3;`8#Wrh1Av=P07Ml&qjED8N7zXK394i|8`z(T%9LZ83u9lRq}Phr*Ux z5^YXv`{fZoYvkcw@1^T6{5&&&Kwj$bGA`pJ0^I;tVPjZ5+x=ZC9fm?cl1T{t{FqTB z(fa;_RTeI`qFRTOQy^0a6MVb@q}jGgq`1TW{8F@1;qb$Dp^~|(xk#ggP_$#56}iLS zk`C+-0M#Z~n2lg87f0?k_}-hc`_j|;#UzA4W{VN2%C)cgM=ne8<%UKBwa?Y2{%9fx zjrgNj0jP)!UN)86HDAwu=W~whDzozDqfmT6i*}lJK5%b(h?8p1+!YzBdujjqeW}$u z1TP(6WI)-FMp)i_+%&v7W90J6DXzQ5*sj`=qU)4AtzNESy>Xutpuojt<*|L$du~l; zr3yNi&VNJKMCd!E9p`=^EmD4P?iYJJiY)9Zf=V5|CsWLwmKB0iK<{8Yy++Zt!CYSR z1kTn)P(42R)K+ZC0595e#ig+{@v5;FKQ}BB9Y0E|1-=;C@5dvfg+@g))rW&sZ#wo9 zd~({{&w7_BQ<=H|lqNpsIl=2khtzS3F^vc|d$l|E@tkealx?M37Q0#n2!u59)7~~U zXe@C&0-Lg(64pQPl{|KStd>sjLS|L9Ny%880R6?0bCo$EJtSUZU2o!6Qp=$H{M z*wyv0Fi;l5(Bgw3cjI$Pi>gOrC!rIDoJY!qW$)YgJh=Bx9<+>c)x5Mjx3V&1%qE{{$r14!UCY$2KP*&|UoC4T2|;xJ zHu+c!?e<+2#fK5H7PVElZL2|E2jhziIEY8LFdlVfex)AZpS_Y;MFwdd9Wh{lT&JBi zPP|*}pWl8VsW&xV=|NF8u)TEpnRodK4Y*F^x+?dlE8T1yPzE@SEHiLC<-}^WztcfM zarokX&bn!U`F%znd+GNEd^inWI6u~;!Q!)yI%kV1#a@*M?WP}hyFDm)gni7Jn(6!x zfI=aeXrT2}pq-XN*%3oD#BI?$Lwm#2NbAH+%Pi-Y@7dH04)6P)v zQka-kOv}+uTHq#m!s_bjx!!dZ%{pR4dFT74dMZA^0K8^pI(%b+u6Qki9vVWoEE7Ve zgh67#2q>amR2QxGKigk#x{=CqcNcs(IPG5U03m%LTL05I*)zfsGD6(Y# zhdSm13f$dtJ`mw~F#w3c8a!A!)@zSb$od>e;hlfD!$7f?8ZL+sp?w>j`k}?2UB2BJdt-1~uBTk_vp|NoAVL30Xzyh7;Ms zw7KUG>2$^z)@)*N-Ev<)wErA7Cs0tn;u*fHU2mi)wq2HAdpcoR#NHStFmRuXXMg;o zGzTZVUcai56jRG$**qIkF_4yYcD>^1H;dQxU@{{@^s!Ch4&(Aq`)<<@1VlRV^8u_O zybF;CiT2wRHUpvQ&KhG2EiU&xp9_T$gcziRvYiv~U8?uj1^T75?Eh@Vw36lhx(^fh zLTjV#&x;%O_$hNwAcpbXxpyD@wrCG%0YA(LZ(+hu~QIN;; zhkZGeZ0@pCc=wJ0sd9@+0~VAYvB70u+S0iRBq!eH{G_8+Et3EXNPs;9c>4);UnxC{ z&5&}-`Y|MDMMz3YzM-`2#$J>}`sc8QzHZ_(`X_V!;UN%GizF5^q{c2NU8hxWwO6%kVMBN-uk1pv+8* zmz~ITlB_l85wdJPLTzp4F4Y&W#E-o!K`KoSSJf1R+L_YfF~}`((D|*z%?qG&lslM9 z(X28ywg@w0#PRQsXEZztxq5*)Axu|ZsPm9sWi#2<1>uU(ghr8fR$JNtoIKjhSbn5a zWssZ+R2DF$P(Ld}n(I3)FdI_$g#a*hUv}bLJVU(Yk@ua|D}ZCr5!1{400tT)iKIM8 zmis9r!3hiDxL$lREej~iR*x(kMVJTt1PdeR!$X;Pve1o^M%F&h;vV$ax9~jK_T6HM zU~0dOjMr2C^cb1&AH8I5xkp$_8}h0R 
zG4evV3XI*w3^2K<-6UCeQu9`QklPxkkrQZz-0jS;caW7PeptS%A(`tZx@{R2d#o=! zQp|5s<#Y(C7Z27McU78iE6w0NS&;J()xSv6it>+af*fR**YUqqAS;Ir zX?^Lq0TkPTBgbyj@LddxSUIoWTn@`vTz*3|Ra=mSX9m%g-=I`V_V<0y_j61Yr5#1V zvG-BR;u|3Bo0{Ee#O8%OiZ*+G`!Gdc)(fn*bLO5CBsP-Wb~?FFSwmmU$vJjBI-9T? zaIGb(9ICke80=uwH^EwD&PHv~aVy&FbCWHa{}E67{lTLe$RdwRXh)oLzqqmwR=dvD z93N?oSRa1CoT#r##+t|BVfo@%cM)vd5~PilXH}`Pfb>H+T3vt(jt#nt`YS#ua4njC ze^H+->#qoa)$Jovmzy9RKP@GU*N{&FVj)b7LNjhg&15bC@nZ_nS`!SFr1S5^N4}I? zmr?N-4L(Dg$i2tddkPe0EjgNG-5vrOfKyXaZZ)UVBBKS*!2F?@V z^02M3vXXJHS_(^kH`>I;gW5z=aNv(rskvuFAn=1cc&a1|cywK?^FyS6wh7C;`6ea= z>z+Ahr<{p`eN_G#1eAyMF6q4*fdW(*0zNoTZxFm%H1p6q;(NuPyZmPLuq_vs`8?;; zG>m1_9BvA$FR`avV04cNITm=?GdgP;ZoKJ?`EfH-Jz_!uqOE$3Rl(n!2^F!?p|>1L zig#}D*%Ht3wVJ>WG(T3QezV$LGILp=xM+wGa*yqKy?%ULBUGKcWEo@dLjDN096%|s zL3q+?$!S&gT@coPvaX>>1Uj)2v+*sSHNu)ZO32nX2gNU+WqnZY-yM~4SIQwd8IvC! zo;{S|h}+<3w_e9xlY3rWJEScumJQ^O;fO1T2l%ga0nEI?WOf~_;J4zx|FBAzCPYB? zcq9&|{OXudK2AxAcrle}9ck<~qYqo;kK}h}YD_a>nu%cbZlFnVWKe>=yl@!MfPkF& zD;F~pMlbCHWDgx7v4WPWdI&bb#qXzj)I$e8u(^VngLCuyhfC1UPZm?Tu{MUV4rI4E z1hTen1^(o(j=DQaV_CbfX%abv(V7}tzt6*itD^8cpibD9dMFz@dgdcWmp7lS^vK^T zpK9R{F6S*?vMQWrD+X^kq=+AI$gJ=!UKSqnX58IRr=eK*q}O6l!#}&tm78@hzJASV ze$M4(^Vedgxpy`9E$>fmznnS=nRMm(=0+Vw*63g#YEWTD5U+l~KW0?))p}X>%BI3< zVk4trF>Txf2dOs>(hi)ai_=I|1LPF3tcTN)T?Cmd93@l^pOar=LQLShpYp#G9x8`6 z``jM7m$&`+X-e>8lCa*FU>2~zX!4Bh7f^Yai#(h)!}dV-M)X3TY;MI?k~hk0o|f#& zoa$rt*BIJJdl4LZBR@6_X$;-~O^8YxbLtU|F*i$q9*+B`t8a@3nBEGzqlq{wI`=>3 zeWia`$ND-m&uOYniq71o)6X2M919%Z-%7s%r8edC;l>wbY|n;U4rX@^MO11^DI?ujsy5OQgMZY)Dn#U>2*u;W5P- zsgv|#)w0YdsfvS=f`4FTq6dMkAoD(3#NES6NNdy}n`C8-rpG0bBeBAYG6JEWQ5&-j z8)k(nYNSr0gr|a*>DdANmPz?2X$$Jy5VkK>v&IVcM+;}w?2*S8%7R@qfoCl$k>)OD z7rQS?08-F;E^;MiXhxas`K}+Z7a{ zba8kfn@&X+C~S|RG^hcoJ)jlP3+FVmtEDK=1t<1pgPjK#KM`PF5pmyjWq**wyp4P? zOZQDyE38GG5$ioxh0Yv({2~LE9zfE)Q{Q}WC?ztm72DEEtd$b>j-exrgF9Csn_Pza zy->Gy{(ONDiL@j-2Zaajl!)FBIC#r9firq{H@SXv!?DYMyG6@4;+EbZNu6HfxIR6? 
z)E|M4c{h7D&Wo55FWDfrT?QJeeyHzs?GK>3QMgwDKYZo7jwcJcDr{Xv+kG-lU3ldp zh99FlS0I02jL17zOKrv;U`{*tQG>fkBnwGs+E(<~HMDm+6TU)Dx=?>C8{jW!ICkkm z9w}ubXap*bU|^*kT${B>Pr&C%;^sPdeC!(8r!M$q#HM9Ax9I}KWDP%Q*P_ITA3Npk zjRUC~>GEd@=&t)@flsul!otvmv*&RwHWTbi0)RVG0WNS+i1D7N6q%lNi8Y$i`yN?V z|B+(4!?a)g&Q?~Ff=^8t1b%@VpSPW(Uacq9?EuIDiaz`34JJ-&lzk4fHWv+!(%HZp zG7#(``&W)K4#mpUe{g)Qk8%L;$qoZGn++bFb928BNKjGAoe+F6Ifx}hL>_wY^+^)4E*`H z1zE~#%XY;QnqL#W;tsf#n*qyU^l@cI5KG-K!r+3zv6NCNCGXBwQ4pS*g%VZAN0aFF ziXR+gS48VKaX1F&4yhy}0y;bb1ZbaT^kW*&ymvkID_(L^W_dg3b!I29hRi*--i`uW z?QzX)e5}01r>qe;Jj$%**~+0{c9D!TP)>Dy`o2GYvB1NuE1B4UOjg}w1hvPb!EAce zD~|gUHztC-4)?X|b0X$!%ouX>Pu&){8`MmYjW%r(`zhX!>aem-=0twanyk}>IWP8> zRc`B<6#Y8_gK$;Rdl=sDNJQ6Ct;!sC7?bt<%qt>X>P91gU{%{^D?EK8)9oOhUAgiy za#yB1b8zuFAI8L(tc16ux8i1;llHo&xKgEVEJ7U=S+AZ-gVOd(hNf3l@%x-OQ&s;gy zR2~4V6*0lOtWUk!H(JXW+?9(%QyLVvE0{}h8pK4ht$1`55uz=2tRdfyTM50| zGF@cg6DRFxpA*yjnD93H22AhvQoEcV!gaO9Pf#&n0)tn85A9?)Y-88xx}?m9MAa6% zb4LqK{B0_()n_c*OGdj+&hmLb>P*Bn%p8QZfB3*UBh;gH4Hw=YEiqmx= zCV<@(+Lq-_6*SnHLkn$MEcHSIWZnmIhez>ij`C#79wB}KlkBZpZ!c8&_Qs@0IXH;L zwu&e8sb8oP$Ti9=2n)}@Co`cWd}I@CkG)2xXfVrqC$|M+0fDrGTF%e&noQnpGd$Y% zOsWXz6D|0ytB>bBBgg_HYKj=M zA5w<&)ki-y`>nGoiPP6X&~qkOmt157ALN!~FdGBj-fo;1 z3s`ZD@&4ce>B7%jyNAK?3y2K5@wRuFQSS7Lo>>dt~g6c(pZ7iGQBv~?f>_hL& zC=HtDB&*YO1S73T4Y=b;DktaNm|2aMX52#WoaQgOnft35IT9Bcb}4$l@N5g6<{?cE?AdJ+!WH$jkbK+maFI@GoJ8C?Aw3!GK5 zl76p1psJj#iD4FV$}=t$ZAehXlbr0NB*Zu=h(P*brJtch*rhz5twlAZRp@asVu;w` zGdpuddk9jB6uRv8IdGZkJzL!=e>*u?(EO{kJVE_#Zc`S56$xL3WOrH?)wztsBzvhw zte@&axGdm?-0}pN80Ua^%4D}6x_i$iIFM|X9LwrzSjzFOU+4q=3{%aET*s!UQ1KY) z{NtQ#T1K5byj79<`!qUci*J?~Xelhut{0Ia+ghJ@nw9nFb9QT2MUj!AGi3X+0F{*#7rD%*~)Sj@; ztj+q@s(zmP$@Xnjng-n=qNk$@n88jtXY;G?L($TDLZWdj^h7!{8dPg#%|VEyDSL-* zIagQXt}YN@P4HRN;Hq)CVtb%pHhmwSV`w#-ceO>QtP{fj?#0eTwfhP|AhEL^MZ zS*mZxAHi6P5y6ailIE2f5g)JvEU!1R)RZIy?gQI1k7v3pQgF-6FyzTL-Jpvee2xq9 zsR&cMT+&(1C4X_p`^vCh@38Z37K{lLNe{MM!F8Z~@oCY&@#};>slVOCr@+NCU~k!` zQ0}tWM2q&FcvL;bJ0tS(Grb!zU!u7 z`fkDV%l%+WP`_yKJ<{j0j}cr5R~#dfzG1&vI?mj^aRf_Os7Xxt)J 
zV^%Gd2FuE4iYRLfq^{^k_c8#@;*W!Jt}Vjy+x)LH@=V7uCq61g%P4MD;X{kC)8(j| zSix9%t=~0Z_W+tFBiBJyVOv(Ky9rOuXL{WMWDE8yWq6Ys6gsg#1QAgA{xlqiA z*;83e<$h*-69O(=^c4Rx3+xAa=tx=U7rR{wC9Hq0EK7UdlM{jcmIWm@}?YYX3PtfxFayzF=yk3 zKFS1B_8C5ldd98BZ!c{+>wQXe-amTSeoMfp?B1LsX+zGUi#wPN`{G&L4|1?Xw95m} z;}Kn3>hj2;#|~a?2cd`dX&-ipw8#kgQ^PmY6@>4t@%7rr`RMN_%|W~}y&D!wk7>Kl z#=pfWpX$vO96}M$ z8y5pH7FpYWfK^*kVKwbMx=luT_#@RNSiGe0I$%L&(m-_f(a;6Yg>bvkIHYtiA2Qit zmyY=0cg=atKcP{L(I*~@4tt~IdmTP9Sh&7L0eu@m0QunforP1G=wS&lB%UmyBz@}<<}M0@ZWjiQ`6RCwkYPS~ zfE&VL5_HQLX+`uEkiL2EwMqFR5(`F8@2)8UC9>onvs_tHLfGY5?0y&t7Wanky_wRy zn~TR!U}4GkAEUcL3jnQTYjLXuF$E=e{@*fN`tMMJ00ZUd0}X>EEXofjfE}rumwW8a z3t1bkDF-U7s@67L*XgguA10grdLW|zE8rC8Q?FQ+AEV+zs#G}&wv9pnW^$s ziGq@xZ`0zL{^cK(@;3x@i`Jy1zir=tdOuU&1BaQ|@526iJjZ{$jLHhYNX17x{=;C4 zdSpb5GI03)2gBHZ`tX2;P+LLoIXZCu*ELB2#2;I%-1Fu4PO$$xc0!5=@R=Ga_GbMu zTK_UxDn9`ZlivHJ`PXKa|6GL?MW6O`{`jwJz6AD_Y5#n}Bmdi@`Kz^7pkNIZ5sCY+ zYvKX7RQ?UC$zN9a|F~IG;{lk*!?OC{*GvNr;}Nv7|JMNA1)gQZv9hG|U)N;13mpF6 zp#T43(8C@HCIah1Du>|c=;%Gl`cos~ez3F$xcCXzIk}1(G2Ra4s=NhUbMt@;>;8cC z4~M^&g?}%~y**XxjKBe&{R9+5&na}vDwcrfGYG8tZ35mqE4Nj#SxxQLM>$EP%!)~G zJauDPjM~ERP*%p+WYj|hq7Z5SS9@m_5LFkwecC}mIur>7MWtInN(BiCMREX9q=ruE z5~V>!Nog2RN`#?PN`;|CU_d%XLVAD!{Psb8egD_r%@?m1UchkX#NKPKz1H*m&Mjyy zOK=-BXa=2J>iVrh2g+HbQN^zT56Wbgsh$u8gT?~Lt$5Rb+{6C^ur6tTznUyb`k4#N z8f2fK04INA47nVyP$AHCO@25b5Gern<4S-qB6wa2ugwM7V&9D4Gir>d(X)x1`*qK} zPv8kOph{ij2U7Vd-LBy$M34|7!5W!JRy$A9`Ug05vAD6)Y5Is@a1eEapX%yr+y`CxNAexj&wfu^ikL=7hO!DoiIVta zAa)9|%=(G>43H^|?xRlaOJ5=c>aP@F+|7==z5r8|`xO!|%tbU%E*yF%9}DgHndSU^ z@*^Z0{v0o>QTad@a@?I9-=C8xl>yQxy=H}DtfG!|3eRwoSG<;F`YnVHyk&a0JJ0S;jtD;W%n3MUyUKk*`!pEh zGBl@BN#x1r0&0!Bs!BDx9aH6esyAZM7Ui%+tHw(+6*B{5IdHgUzGmet+FtTsmp;c8!1n4SxEB z8CZTPe|UC#KV?WBABPbzW|BEBYDVYRU0+i6C)ZOI4eZXFDOd&t$GsBzBfkB0%| z{1ENIU|4`0u6}iz{pXc;6b^4FmqkKKVLj$!B)&061HZ6BArsGQ z=FxTIA6|Ne+wretcP1V~BCwMHEp*!hXcU3}l}R_f6Rer${Hz<|nM6h^=L1H4HqfQ) z8cB2;W?%13mCHl^O>qpsRnQ%RsmEBvkdwHPE4%e(slE1_`?s~|N5OzuG9Ad%F?t;r 
z#+{lc0k<0g)_=xI9KT-kMp%9o31(Cvj!?J@$b14#YU!EGV__N89C-V%;!O+ zFG6e_+!|!ES^W?N4hXfo=J@&s;`7TP*RIXuJ2Ja7u?F(WXl}h%RrAZh%ot~E0OYe( z&5$tHaSxjt62j({XYV(4l_Y};;a=r@u#gq=6zD)N)u?K0a1GV~i=zcMxtre0TM%fe zbh$0bfM^>cTMSE8jcD|sUwJ6`&TYetoATNcD5mw0675I(fl=D+$HJ|d zdZz2jbR>|ZNjxm{)YZTLWU0ad=%(MQ9O0XG=hM0k>AqaJpqXbJMYvpM-Tz)bD(Neb zQSz)Nw>zBAI=+GVA7rF6_in%XvRG|dghgUN}{t8FX}uMY%!m#}&bdpf#m zuf+qaftRy1Jt@C%;NAM;Ev%-$R1GaUFKl>9i~8Fuqw3jVvBzR-#m=nPwhL(l>X zLcRtjk_Vt{0_Sr9mBR540334$s&Vmx;Ko0f>iB^i<+-kzS27zjZh&Bi;sY*m7MR07 z^6$}}p_}2a-ltKsj6S>)IDi?|{F^La9j6FNtUTMtEz)T*v z9YrrfTlFaUIK^WE^=F_B`nk;-%ZZN0Vy>NooxjNc!s<>ZgE{Si|AJX8S%ed-Xq6Oj z$J=CGD&;E*n5Xj%h&Og0wiQ+{s?1CwE5q)SEQ644znzt!(JeQBGcCWK3t@65TeXB7t)7rl3QhwC|qdLy#>|VeMVNF2gF&_ z?gohS%irr0@Wra3kaZveD`rGgIPJR2(t6OA(HX`1ZI`B*2kx7euN}D1MpO;HZ@! zvVZ)^^W!Q;DfXMhDQLk1cGP>(v2ZyP(XA*sTPL=?r54v#2E9$`JbJ<%$VE}9Jl0Mr zKE#o%!J#IybVy6=*K)V`r=3st&7)D##gGnHy@j?n*nm{oJtHA6`RU@(a($U$990H; zAUrci%N_2F@QNz1h20RD-eK_qc7=fzb?Khkg1YSygA>AW?JtXz(X>nz5c|Y1KvOT* z9mil_?mMn+dIiLqkn6*CcTED4`V;#6fEpZxb6z7bSj_NWZ`p__O@cQnrlqJ$hy$!o5&h4xmdcge~O0_|Y!)%+T=t zX=1HZItnIrlK4pBTdFs09t!4O(b9Q7Az~ks9w%G6b*EY*>ACML&%HMi7>fR>SElRN zMP@u0{GOglIod=4Ugy3pHNb()wA;2_mp&JOx&DATgyE8^b)4+Nb^!69}6;k z@;N;qJYv?{qVttmxji4ltLC4WV7R+Blbv;2_3NjrBvrrD!#z5V^sDo)UEcep3{VR{ zCH9p%C7C84vm!#__0jYW7eWZKx`-R|DOmRk@#wQZW)SY-3>z=ieQa30=?H&)NK1Gj zi!!z{ucJ|G4Yv!hPqvo`?8*x_#eGMa>a9MUvAN^5a@*B&Ui+VgTK6&`nQW#e1uB{( zR@6G2_`Fb%W9?VA4U}_tRhb-blT$DIch`DXZ|q*B)M)g}qLA3YbGSvcLp;yW(K!dM z&|Luv>i0BjrwBbh1d~sK6-rTjkhm}mR`g&o6F>mY*PtmCtfaL^NTi}~_I7Q3g)>!P z)gEgqS z*6JxaZ1u8li*CScd|2s@#?fF_CWQ@Ds%sodJU#Ian?OsUTM1e`^m~-q+p$|nCf9zu zxwQ5osZRD|ex2R@OIu=JjkH;0Du;cnBXWJl$FA@zmd^I$h8kQGJ$N|gCM6_CoYFk) zOZ2!}%@g7+hA)sxsemC7tJ?2-%9)TX@?73|Cpp6)!y;jq*+f@R-P$6*TpD(;@) z=aQ`(G$LxMj_1xnzx~L9&WWHUFBrKp}zTOK!y`~Ee?hWzk)L7 zO!Iz_oYPl^@HTjg9=l*Q{V0}S+S(3L?Nc{T@^KU{Q#Wo`x0|3O$m}nG9q%X%FjOB+ z3~11$#rJBjw z$b~}rKI70@b8|Y~TF8?qC}-iD4`i>aJehPEBu+Xg3v`rjTe=!g!MkhQ;(Dq5Nq3c^ 
zB0aN*1#ztroNVHT?ETZec8lm{@dqM88q}3o-#ucu!zrZD>ah)>9^L1?yex(7*rR;+ z3*E#ut1|)54v`J}vjU|l>E*5*V#qg*dyB>QF6PZ1mX&yG)g+7M?tYTX<++nzL49s0 zCQC}>f>PFOl@2?ug4@ZP-5x%0uN2`|0nzRrb~t9#eDqWhFa1fM7b@>9HLCN_5{)EU zrVg#Zoi%-^f52=+TPs#`>|U1LMl)?Pg^O4szFkaB)Ow05FqV`Shc)Af799hi6I48? z!MhcDy4XEf`2vQH83_g%K#}_kt>gsnYa2^qgZiAd<-->DV`R4T zq7aG6==h_nW4+v{*fV4sccQ=uX0LZ{LJeOq2p7m=`~mO}!*CK`2;%Y`&1e-xuf24V z{$Bjr8i2XveL16z-vdT4l(o2|!xg|GvuS7^{Tg@J@$WIlNT@^=S+QZ@P`8C3H0#Bj zm=XCzwG7e>qvXyBT8Ia^?6wz-Y%y{}Qc;E>+t%LNORh)`b?*q=_r}`Tlgg)Y)1s5eZTktc`#eugoHI$W+;adBvTN$;{cbx)9_aKDqD`SF zz62O!RYYS>yvb?UTbN*f0|dtL+3mR3g`kUwd?yED*;A-ZNl6U%3(@yqiuVHB!Vsm3 z<5Q1hkV*{=YX`zt`@F)zlZHTMc=hPw{+nbk_JD1`BtHitzchm+{bkpb3*@(&YiETJ zJC4VDx@Q5mgKAVu#-uj~DO$*CCnmRfY3ef%AzxVItGUL4Se}oOuF4!$0HO1x{8P*b zs3p7{qdnUolf>wBf)t^Q&1#OpAL4(cr5N=vh3q?j@d3Oi3JA`ryQuav>9EL)&c~&% zDTZrf1?)wK>xj}krowCoERrO8=iLE0kUVncR_3KmQ`>d?+!0wT@H^~`Qwz-`*c#i# z-S?B;v9lQ%Gl$asxvZ@|!eeKsfzWpR%h+u{rd_*?xEH3c#x;X1tE*SN+gZuW$J{X^ z7n{nmrGV-1dr?G~QCSqWk(jE}j|sA`Ej^v zD}Fk+PUvQlm}FN)3$XaQBgs+_IjMvNP<2oh2fZ;oO4s$Kd;2zlrvf>g$o=2&m9GT zc$%c~G;GaHw*`s_GT*F1yV$4~oeOQ?(1wgd&THanm4f)HZeRYIc|Il!EYE3t#N2!x*xQzFiL!)- zE3j8lcgh%5h3+SI`&_`A8JCG(Hq%o3R*BtQH2VB$oCYBIZf-BB#$)FkKYR z6bf7hfYxM6E``{)Llu{bVgoN$QnBr7Rxpeg<385AT=zu0l_Z>e>#;Q6u}^{{85?bn z&!79f_R;GDUPdY7^(7hg7;?sBN~;^DN_L!Rvt8q1Xy?nQvbD+Vyjq$?un z#1+evAztZWv803T>|&kWazA}I@>p#%&wz~vS3r7l{^UW5JcWBQfAP@+^CPUz>!Ezw zGIY{tiT8!owGNI*quSX)87D2y>ciD!<$g2%g0>ci)40r>!8neW5naRwbu$$&iX=z- zUmfNWpCS;Rv#Df%V{Xx?lEkvXqG>8Or2DXNg&4JvN?i7KZhx8DBmZmIm39iN0VJ<1jUULj z2FdQ%F1_zqFWof+t8gBV>Be+5uf{Jt4YV#ZM>axLA!w&;OYdjD(PpXyta*Nr}e42u=cjwS#DK z|4q=ra3@)-@#!Z+8T%33b;D62@(rEm;85-XPEAS&L=$bb@_qcQ{Owr*PM5!UlV5o^ z6m1A2WB&(zgOKzy1nYoMV{RmsXKVgvkKw=9F8%j5tN+cHgtgrjpZ}ll2vg-;EhJ@x zxkMrgyOyw*v_CL_(k8~lGe;DEU+FEd*u(3DMcV(>rh2JidMIS}0RhnX&h_QMbnCd= z5;ob`&Q4XwC$LO(Jh*2hhrab_4*dBqQ2d0gFn(#Wad+Nf!_e5c0;*=Ksnn$X`>^_^ zn5y}c5i#udrp_dx-a{Qf)Wgnyci)UJyqf|ua0+V4Kr#HVq0Za4*h@B)y* 
zaT@!qwPJyTSra6GYF;~&A(~e#BY!{}g0|C>6+(j<85x@(zt1j&S0I7mRg_iQeJL0x zo~mEx{p%XXfs(*vC|og!tH1T5D*fPz^FOo(=kNX z>?))u_EGeq@?IdtLndULsYj>ENVn>dpkaYDs>jI4p8kkCSJss%lItaFp~)m*@_Mky z7%t=gp{qSDv}b5Y&f*5=%r|f|FMZgu!hjlZUpNC#A6Q@FEIO~O{p@n;RE%KMxtHdD z=sEK4esic<)O2@}Sia|Bf{FU_lc?Bj)P5a=1ZnZ+U_WltSiD>;VEq zc5(gQ!^jAxr-55`}_NgX=AE?jy)PriQ_fs^FhparC4{T%6#z? z>Zoy?{NM`s205@dH1-gsd=prka{z6#Za4Kg8!x<^7hxM#CU`$7is8RaS;zqCk&Q!s$%Gq43O#eF`@HcXN@!Mh{UaJ#Ln?SX8SIzNjAzbfC zo9D)y&e{17V=`wG02(RN>pjIF2JqOhRlQ|QOG>|X@jlizcgn?X?kp5n4oFO)nA zE2Hs%Ud=#vZ9SS*x)0ch64`uKn-IIsoqGT%cme7lnDHw{%d^~38AxZKmeXIzh-3{#KENBJ(6s$x9r-TB@$e5RL6^E&gP*g10l5#zAiwkn-7a+=9;c z>eQXtL0n70612s$Pq(Y(4nlOkRSIRRZ@n`Kc8_&lQbd-)KYA^EIhk*U+C^>ws)Bk{ z&(;0PLb(R;P_ST=NNWYZef6I+$V&;(g4e_`YgLd;ZIqOhT`33PaE`65ttiQt`f+%m z+0?xwxS@RW^5 zYCT;x=4U(NCD>y&IpT~Pz#QkSoB7uxLeWwqY`&a)j77?izRxXbeLogEY-o8K9QniT z@pW)5Ij|rgTS`Ks5C2XMH&W+Y$HH+#di79XW@&W>^k94GR|GlLw zglTUI)p;ngF+@&6xowPktkK=w(G{%>(JUB!+-sTPiQy}QD`Pdugk@_+i@2RL4z?u} z)mtJBg?QjGuk6ttUFVL|ZznC9+P=YQ#|{YD%9`HMf97j|{4z=K$Ox(>p!$5zX~Z<5 ziDqvE74_(T-sF5(;#hCFzWZo`xlH23nAhvdaF^hbp)TOH9*jTS_9%OYuSnVd=S1bx z$-va7OfrRSQHOe0i75Po_r1p&m;J$-Go9r+ajycXR9fH z&LcgZG2jH~vH-z3OPExdl}M2cJPTZS{34ZQOQcK!3*dkhgR<-r3bz z4d`1&B`zo#J9(#7eu;_ta}oja#V25>x^D{84#|xm3=Ui!{P5&Na!k9|JFDKz@88Pf zjht1&yC>Zdc+i|n&JA}->n!(Jn{sVCWqG#-_{n3;OhUgujJ9O|J9pivsS%4kq-J?f z8oN@Yll5rpK_jgXxIkrAD(%5rLR>-r2MKhx62^d=HoY=l#5xssJ<9uAn~Y67uYvGf zh9x@bm3`gT;G8T3SKKH$b`)oiF4xOIK5uz;<&-y^^nt;TIdpPzyAc$y7cwJ~+%vH#bc?ujMhr~eXbC$jK{b_YPB`^)Vxee1Iw`Liuf4I{X3-U6+@aH=y`o=v$+ zK`k1?YFBH7aVFRW2;{>KEwB1BeNX9L;@wuFvX#1)R4{nKKkw8L} z;OzL^%_>+gR9Q4EwNMYA?E}Wlpk44%nmhjfKVL=zG2=f2#PtriW!dE}Tfy=^x?*wL zE8psB_Rb(WgS%2?Y9zNkWGw7(OT)1b%3V!O({~MX*;{lXC2WVDeFK%?GUwUPba86! 
zi*oy`*TwMrpabsi<{rRSiG^nb3W_ z-q+gE@g150HSg%8kXOlMXEb|7cAf=?VLc7F9!;!Ievd@Kr;QEWlH7dKhZP?mmisS_ zB_F!1PCPt0Sx0(2;SJwAtZHhdJg-P}akYbx&6#^(De)Z(LHPU~86)Q=zPH=mccuP2 z5Wz1Oo(5=PDBb3{{!ZEeg1~YZ4E2BUM=$U=PAv3(V*7wUK~If1VO#y;@6#T*Z3_4I zXALpGCB_8e^EZxW&VMdlE2vQpzt?wG1a-cwdRgo^%8uKuQ3G~H+P#1-*XiV+8y9#I zw!i3+ZrtGSf2?bUUh;V7$C8U;G=BmPHAjy@AS*GkHJ&7%3dbt>qMXgYyS_O@T3X+- zL)CO-e|JOO)BH=HaCoAuub0%;fOf}4-B-UKRAADHmdn*(CCi`760J2N<(2C~l_q{Q z-*uX!)MfjXQT1pOUB7-^M$F_fT1`F=ouAfvmo7v19UpS+`1xPgg5c*E7_3!Dq|(eo z*EUJUKN|S5=4@a5J1PdUftATS&*0QaojmlrH#sl42IBgx-DrPZfE#0|ybY;wiuB8$ zL%}2a-mmSRtaWI=LzmYb|9z*tZcKb!<6*|YT(!Z>LDsuy8p`LTh-MJ~nNo)ifeAUN z&7Gff8*Sp`U?>H-Cal0(1a3E#OK*3n{+=9yYKsklQedRHEd&YOQk_^`OLoJbY0rk$ zCH@)n_XA;N7S`OIbClW~90MAJZ?C(Xa0C_p6P?pVh)V8s@!DAyq?r2GP!fV_!D-I*SN0#B1W>;9C0;8K!XipE|#D)LV7(BqL2tO>g9$ zi%=&7G)9sy+2|3Wm_kii(uz|gT`ZCS!*03VCg9IAGNBO+?BPkd?C{zG(Xr^c%^-0| zD&)8JchR}PO-)Bua=I(L@{^<&Zh$=DO^Ou!=1Qjgbi4xz=9GvVyPr&)*tbC}O_emM^u-{rL^kv$1!usHf;+owNqQ;as0!9J>ZC;1W^2DbSEd|Yiz zc(5Ei5*wD^SvvGkJYhTO_t|{!lK$&wB;jBRMcCG4{+&Y99M3`mw30#Mr0`Fi13}{) zSgy}uimd)j6X+l_XE_GudT;9d85u|W0cFCB@{i>_N&l>|M_&y?d2Zfj_c!~aR;z}3sMfsEq6Qv|!J|L)?iQTqSwSQPYFDJ38v*-*KmaQDBd^Irh^C!GEl oBY&@7|Ha7PW8(j<7`b|H1}7;(W_eqk0Q^%?)VNV}%{1tL0mj`ylK=n! 
From 1dbd16115f2699b26ac5ee0aa79f4cc951980b00 Mon Sep 17 00:00:00 2001 From: Waleed Date: Fri, 9 Jan 2026 17:50:10 -0800 Subject: [PATCH 10/16] feat(sidebar): context menu for nav items in sidebar, toolbar blocks, added missing docs for various blocks and triggers (#2754) * feat(sidebar): context menu for nav items in sidebar * added toolbar context menu, fixed incorrect access pattern in old context menus and added docs for missing blocks * fixed links --- .../chunk-context-menu/chunk-context-menu.tsx | 7 +- .../document-context-menu.tsx | 7 +- .../knowledge-base-context-menu.tsx | 7 +- .../knowledge-list-context-menu.tsx | 7 +- .../components/snapshot-context-menu.tsx | 2 +- .../log-row-context-menu.tsx | 7 +- .../context-menu/block-context-menu.tsx | 2 +- .../context-menu/pane-context-menu.tsx | 2 +- .../components/tool-input/tool-input.tsx | 8 +- .../panel/components/editor/editor.tsx | 20 ++-- .../components/toolbar/components/index.ts | 1 + .../toolbar-item-context-menu/index.ts | 1 + .../toolbar-item-context-menu.tsx | 88 +++++++++++++++++ .../panel/components/toolbar/toolbar.tsx | 98 +++++++++++++++++++ .../components/subflows/loop/loop-config.ts | 1 + .../subflows/parallel/parallel-config.ts | 1 + .../components/log-row-context-menu.tsx | 2 +- .../components/output-context-menu.tsx | 2 +- .../w/components/sidebar/components/index.ts | 1 + .../components/nav-item-context-menu/index.ts | 1 + .../nav-item-context-menu.tsx | 81 +++++++++++++++ .../components/context-menu/context-menu.tsx | 2 +- .../w/components/sidebar/sidebar.tsx | 53 ++++++++++ apps/sim/blocks/blocks/discord.ts | 1 + apps/sim/blocks/blocks/fireflies.ts | 2 +- apps/sim/blocks/blocks/generic_webhook.ts | 1 + apps/sim/blocks/blocks/grain.ts | 1 + apps/sim/blocks/blocks/imap.ts | 1 + apps/sim/blocks/blocks/router.ts | 2 + apps/sim/blocks/blocks/rss.ts | 1 + apps/sim/blocks/blocks/start_trigger.ts | 1 + apps/sim/blocks/blocks/twilio.ts | 1 + apps/sim/blocks/blocks/twilio_voice.ts | 1 + 33 files 
changed, 388 insertions(+), 25 deletions(-) create mode 100644 apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/toolbar/components/toolbar-item-context-menu/index.ts create mode 100644 apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/toolbar/components/toolbar-item-context-menu/toolbar-item-context-menu.tsx create mode 100644 apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/nav-item-context-menu/index.ts create mode 100644 apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/nav-item-context-menu/nav-item-context-menu.tsx diff --git a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/chunk-context-menu/chunk-context-menu.tsx b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/chunk-context-menu/chunk-context-menu.tsx index b4d2b664be..4586b3306f 100644 --- a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/chunk-context-menu/chunk-context-menu.tsx +++ b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/chunk-context-menu/chunk-context-menu.tsx @@ -95,7 +95,12 @@ export function ChunkContextMenu({ } return ( - + !open && onClose()} + variant='secondary' + size='sm' + > + !open && onClose()} + variant='secondary' + size='sm' + > + !open && onClose()} + variant='secondary' + size='sm' + > + !open && onClose()} + variant='secondary' + size='sm' + > !open && onClose()} variant='secondary' size='sm' colorScheme='inverted' diff --git a/apps/sim/app/workspace/[workspaceId]/logs/components/log-row-context-menu/log-row-context-menu.tsx b/apps/sim/app/workspace/[workspaceId]/logs/components/log-row-context-menu/log-row-context-menu.tsx index 56c8cdab00..41deea199e 100644 --- a/apps/sim/app/workspace/[workspaceId]/logs/components/log-row-context-menu/log-row-context-menu.tsx +++ b/apps/sim/app/workspace/[workspaceId]/logs/components/log-row-context-menu/log-row-context-menu.tsx 
@@ -47,7 +47,12 @@ export function LogRowContextMenu({ const hasWorkflow = Boolean(log?.workflow?.id || log?.workflowId) return ( - + !open && onClose()} + variant='secondary' + size='sm' + > !open && onClose()} variant='secondary' size='sm' colorScheme='inverted' diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/context-menu/pane-context-menu.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/context-menu/pane-context-menu.tsx index 96b36e3bb0..77101d1dc6 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/context-menu/pane-context-menu.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/context-menu/pane-context-menu.tsx @@ -38,7 +38,7 @@ export function PaneContextMenu({ return ( !open && onClose()} variant='secondary' size='sm' colorScheme='inverted' diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/tool-input.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/tool-input.tsx index ac23aa5df7..bc14069563 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/tool-input.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/tool-input.tsx @@ -673,7 +673,7 @@ function WorkflowInputMapperSyncWrapper({ if (!workflowId) { return ( -
    +
    Select a workflow to configure its inputs
    ) @@ -681,15 +681,15 @@ function WorkflowInputMapperSyncWrapper({ if (isLoading) { return ( -
    - +
    +
    ) } if (inputFields.length === 0) { return ( -
    +
    This workflow has no custom input fields
    ) diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/editor.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/editor.tsx index 1392cfdafd..60b7a7b13f 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/editor.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/editor.tsx @@ -1,7 +1,7 @@ 'use client' import { useCallback, useEffect, useRef, useState } from 'react' -import { BookOpen, Check, ChevronUp, Pencil, RepeatIcon, Settings, SplitIcon } from 'lucide-react' +import { BookOpen, Check, ChevronUp, Pencil, Settings } from 'lucide-react' import { Button, Tooltip } from '@/components/emcn' import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider' import { @@ -15,6 +15,8 @@ import { useEditorBlockProperties, useEditorSubblockLayout, } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/hooks' +import { LoopTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/loop/loop-config' +import { ParallelTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/parallel/parallel-config' import { getSubBlockStableKey } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/utils' import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks' import { getBlock } from '@/blocks/registry' @@ -58,9 +60,8 @@ export function Editor() { const isSubflow = currentBlock && (currentBlock.type === 'loop' || currentBlock.type === 'parallel') - // Get subflow display properties - const subflowIcon = isSubflow && currentBlock.type === 'loop' ? RepeatIcon : SplitIcon - const subflowBgColor = isSubflow && currentBlock.type === 'loop' ? 
'#2FB3FF' : '#FEE12B' + // Get subflow display properties from configs + const subflowConfig = isSubflow ? (currentBlock.type === 'loop' ? LoopTool : ParallelTool) : null // Refs for resize functionality const subBlocksRef = useRef(null) @@ -176,8 +177,9 @@ export function Editor() { * Handles opening documentation link in a new secure tab. */ const handleOpenDocs = () => { - if (blockConfig?.docsLink) { - window.open(blockConfig.docsLink, '_blank', 'noopener,noreferrer') + const docsLink = isSubflow ? subflowConfig?.docsLink : blockConfig?.docsLink + if (docsLink) { + window.open(docsLink, '_blank', 'noopener,noreferrer') } } @@ -195,10 +197,10 @@ export function Editor() { {(blockConfig || isSubflow) && currentBlock?.type !== 'note' && (
    @@ -295,7 +297,7 @@ export function Editor() { )} - {currentBlock && !isSubflow && blockConfig?.docsLink && ( + {currentBlock && (isSubflow ? subflowConfig?.docsLink : blockConfig?.docsLink) && (
    + + {/* Toolbar Item Context Menu */} +
    ) }) diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/loop/loop-config.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/loop/loop-config.ts index 393d5f73f5..0e8395d5b8 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/loop/loop-config.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/loop/loop-config.ts @@ -9,4 +9,5 @@ export const LoopTool = { name: 'Loop', icon: RepeatIcon, bgColor: '#2FB3FF', + docsLink: 'https://docs.sim.ai/blocks/loop', } as const diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/parallel/parallel-config.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/parallel/parallel-config.ts index 52c6af69c8..758dd084ea 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/parallel/parallel-config.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/parallel/parallel-config.ts @@ -9,4 +9,5 @@ export const ParallelTool = { name: 'Parallel', icon: SplitIcon, bgColor: '#FEE12B', + docsLink: 'https://docs.sim.ai/blocks/parallel', } as const diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/components/log-row-context-menu.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/components/log-row-context-menu.tsx index 06c654dcf1..009b24b8fb 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/components/log-row-context-menu.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/components/log-row-context-menu.tsx @@ -66,7 +66,7 @@ export function LogRowContextMenu({ return ( !open && onClose()} variant='secondary' size='sm' colorScheme='inverted' diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/components/output-context-menu.tsx 
b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/components/output-context-menu.tsx index 8746a5bc39..2cb59f9f9a 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/components/output-context-menu.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/components/output-context-menu.tsx @@ -52,7 +52,7 @@ export function OutputContextMenu({ return ( !open && onClose()} variant='secondary' size='sm' colorScheme='inverted' diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/index.ts b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/index.ts index 188d882e90..ef1aa6391c 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/index.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/index.ts @@ -1,4 +1,5 @@ export { HelpModal } from './help-modal/help-modal' +export { NavItemContextMenu } from './nav-item-context-menu' export { SearchModal } from './search-modal/search-modal' export { SettingsModal } from './settings-modal/settings-modal' export { UsageIndicator } from './usage-indicator/usage-indicator' diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/nav-item-context-menu/index.ts b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/nav-item-context-menu/index.ts new file mode 100644 index 0000000000..f64d3d5451 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/nav-item-context-menu/index.ts @@ -0,0 +1 @@ +export { NavItemContextMenu } from './nav-item-context-menu' diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/nav-item-context-menu/nav-item-context-menu.tsx b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/nav-item-context-menu/nav-item-context-menu.tsx new file mode 100644 index 0000000000..060f76d6c1 --- /dev/null +++ 
b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/nav-item-context-menu/nav-item-context-menu.tsx @@ -0,0 +1,81 @@ +'use client' + +import { Popover, PopoverAnchor, PopoverContent, PopoverItem } from '@/components/emcn' + +interface NavItemContextMenuProps { + /** + * Whether the context menu is open + */ + isOpen: boolean + /** + * Position of the context menu + */ + position: { x: number; y: number } + /** + * Ref for the menu element + */ + menuRef: React.RefObject + /** + * Callback when menu should close + */ + onClose: () => void + /** + * Callback when open in new tab is clicked + */ + onOpenInNewTab: () => void + /** + * Callback when copy link is clicked + */ + onCopyLink: () => void +} + +/** + * Context menu component for sidebar navigation items. + * Displays navigation-appropriate options (open in new tab, copy link) in a popover at the right-click position. + */ +export function NavItemContextMenu({ + isOpen, + position, + menuRef, + onClose, + onOpenInNewTab, + onCopyLink, +}: NavItemContextMenuProps) { + return ( + !open && onClose()} + variant='secondary' + size='sm' + colorScheme='inverted' + > + + + { + onOpenInNewTab() + onClose() + }} + > + Open in new tab + + { + onCopyLink() + onClose() + }} + > + Copy link + + + + ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/workflow-list/components/context-menu/context-menu.tsx b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/workflow-list/components/context-menu/context-menu.tsx index e14c1d5997..078801141f 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/workflow-list/components/context-menu/context-menu.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/workflow-list/components/context-menu/context-menu.tsx @@ -150,7 +150,7 @@ export function ContextMenu({ return ( !open && onClose()} variant='secondary' size='sm' colorScheme='inverted' diff --git 
a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/sidebar.tsx b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/sidebar.tsx index 09c3f75a36..b7ab8cf533 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/sidebar.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/sidebar.tsx @@ -13,6 +13,7 @@ import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/provide import { createCommands } from '@/app/workspace/[workspaceId]/utils/commands-utils' import { HelpModal, + NavItemContextMenu, SearchModal, SettingsModal, UsageIndicator, @@ -20,6 +21,7 @@ import { WorkspaceHeader, } from '@/app/workspace/[workspaceId]/w/components/sidebar/components' import { + useContextMenu, useFolderOperations, useSidebarResize, useWorkflowOperations, @@ -168,6 +170,46 @@ export function Sidebar() { workspaceId, }) + /** Context menu state for navigation items */ + const [activeNavItemHref, setActiveNavItemHref] = useState(null) + const { + isOpen: isNavContextMenuOpen, + position: navContextMenuPosition, + menuRef: navMenuRef, + handleContextMenu: handleNavContextMenuBase, + closeMenu: closeNavContextMenu, + } = useContextMenu() + + const handleNavItemContextMenu = useCallback( + (e: React.MouseEvent, href: string) => { + setActiveNavItemHref(href) + handleNavContextMenuBase(e) + }, + [handleNavContextMenuBase] + ) + + const handleNavContextMenuClose = useCallback(() => { + closeNavContextMenu() + setActiveNavItemHref(null) + }, [closeNavContextMenu]) + + const handleNavOpenInNewTab = useCallback(() => { + if (activeNavItemHref) { + window.open(activeNavItemHref, '_blank', 'noopener,noreferrer') + } + }, [activeNavItemHref]) + + const handleNavCopyLink = useCallback(async () => { + if (activeNavItemHref) { + const fullUrl = `${window.location.origin}${activeNavItemHref}` + try { + await navigator.clipboard.writeText(fullUrl) + } catch (error) { + logger.error('Failed to copy link to clipboard', { error }) + } + } + }, 
[activeNavItemHref]) + const { handleDuplicateWorkspace: duplicateWorkspace } = useDuplicateWorkspace({ getWorkspaceId: () => workspaceId, }) @@ -629,12 +671,23 @@ export function Sidebar() { href={item.href!} data-item-id={item.id} className={`${baseClasses} ${activeClasses}`} + onContextMenu={(e) => handleNavItemContextMenu(e, item.href!)} > {content} ) })}
    + + {/* Nav Item Context Menu */} +
    diff --git a/apps/sim/blocks/blocks/discord.ts b/apps/sim/blocks/blocks/discord.ts index 37e0f30f1e..0d1108a097 100644 --- a/apps/sim/blocks/blocks/discord.ts +++ b/apps/sim/blocks/blocks/discord.ts @@ -13,6 +13,7 @@ export const DiscordBlock: BlockConfig = { category: 'tools', bgColor: '#5865F2', icon: DiscordIcon, + docsLink: 'https://docs.sim.ai/tools/discord', subBlocks: [ { id: 'operation', diff --git a/apps/sim/blocks/blocks/fireflies.ts b/apps/sim/blocks/blocks/fireflies.ts index 6c4ccb8e37..b092471902 100644 --- a/apps/sim/blocks/blocks/fireflies.ts +++ b/apps/sim/blocks/blocks/fireflies.ts @@ -12,7 +12,7 @@ export const FirefliesBlock: BlockConfig = { triggerAllowed: true, longDescription: 'Integrate Fireflies.ai into the workflow. Manage meeting transcripts, add bot to live meetings, create soundbites, and more. Can also trigger workflows when transcriptions complete.', - docsLink: 'https://docs.fireflies.ai', + docsLink: 'https://docs.sim.ai/tools/fireflies', category: 'tools', icon: FirefliesIcon, bgColor: '#100730', diff --git a/apps/sim/blocks/blocks/generic_webhook.ts b/apps/sim/blocks/blocks/generic_webhook.ts index 6ae41dbc88..97ed9c8ec4 100644 --- a/apps/sim/blocks/blocks/generic_webhook.ts +++ b/apps/sim/blocks/blocks/generic_webhook.ts @@ -13,6 +13,7 @@ export const GenericWebhookBlock: BlockConfig = { category: 'triggers', icon: WebhookIcon, bgColor: '#10B981', // Green color for triggers + docsLink: 'https://docs.sim.ai/triggers/webhook', triggerAllowed: true, bestPractices: ` - You can test the webhook by sending a request to the webhook URL. E.g. 
depending on authorization: curl -X POST http://localhost:3000/api/webhooks/trigger/d8abcf0d-1ee5-4b77-bb07-b1e8142ea4e9 -H "Content-Type: application/json" -H "X-Sim-Secret: 1234" -d '{"message": "Test webhook trigger", "data": {"key": "v"}}' diff --git a/apps/sim/blocks/blocks/grain.ts b/apps/sim/blocks/blocks/grain.ts index 54fe53f120..86fdbf544e 100644 --- a/apps/sim/blocks/blocks/grain.ts +++ b/apps/sim/blocks/blocks/grain.ts @@ -13,6 +13,7 @@ export const GrainBlock: BlockConfig = { longDescription: 'Integrate Grain into your workflow. Access meeting recordings, transcripts, highlights, and AI-generated summaries. Can also trigger workflows based on Grain webhook events.', category: 'tools', + docsLink: 'https://docs.sim.ai/tools/grain', icon: GrainIcon, bgColor: '#F6FAF9', subBlocks: [ diff --git a/apps/sim/blocks/blocks/imap.ts b/apps/sim/blocks/blocks/imap.ts index 683928f197..33cc6e0ec6 100644 --- a/apps/sim/blocks/blocks/imap.ts +++ b/apps/sim/blocks/blocks/imap.ts @@ -12,6 +12,7 @@ export const ImapBlock: BlockConfig = { bgColor: '#6366F1', icon: MailServerIcon, triggerAllowed: true, + docsLink: 'https://docs.sim.ai/tools/imap', hideFromToolbar: false, subBlocks: [...getTrigger('imap_poller').subBlocks], tools: { diff --git a/apps/sim/blocks/blocks/router.ts b/apps/sim/blocks/blocks/router.ts index ae6672a309..5f8422c13c 100644 --- a/apps/sim/blocks/blocks/router.ts +++ b/apps/sim/blocks/blocks/router.ts @@ -164,6 +164,7 @@ export const RouterBlock: BlockConfig = { name: 'Router (Legacy)', description: 'Route workflow', authMode: AuthMode.ApiKey, + docsLink: 'https://docs.sim.ai/blocks/router', longDescription: 'This is a core workflow block. Intelligently direct workflow execution to different paths based on input analysis. 
Use natural language to instruct the router to route to certain blocks based on the input.', bestPractices: ` @@ -283,6 +284,7 @@ export const RouterV2Block: BlockConfig = { name: 'Router', description: 'Route workflow based on context', authMode: AuthMode.ApiKey, + docsLink: 'https://docs.sim.ai/blocks/router', longDescription: 'Intelligently route workflow execution to different paths based on context analysis. Define multiple routes with descriptions, and an LLM will determine which route to take based on the provided context.', bestPractices: ` diff --git a/apps/sim/blocks/blocks/rss.ts b/apps/sim/blocks/blocks/rss.ts index a066e1312e..d91f8a6b82 100644 --- a/apps/sim/blocks/blocks/rss.ts +++ b/apps/sim/blocks/blocks/rss.ts @@ -12,6 +12,7 @@ export const RssBlock: BlockConfig = { bgColor: '#F97316', icon: RssIcon, triggerAllowed: true, + docsLink: 'https://docs.sim.ai/triggers/rss', subBlocks: [...getTrigger('rss_poller').subBlocks], diff --git a/apps/sim/blocks/blocks/start_trigger.ts b/apps/sim/blocks/blocks/start_trigger.ts index c32a8eb0f9..32fe599174 100644 --- a/apps/sim/blocks/blocks/start_trigger.ts +++ b/apps/sim/blocks/blocks/start_trigger.ts @@ -15,6 +15,7 @@ export const StartTriggerBlock: BlockConfig = { `, category: 'triggers', bgColor: '#34B5FF', + docsLink: 'https://docs.sim.ai/triggers/start', icon: StartIcon, hideFromToolbar: false, subBlocks: [ diff --git a/apps/sim/blocks/blocks/twilio.ts b/apps/sim/blocks/blocks/twilio.ts index 09e5f7a0fe..8f5db2d9b0 100644 --- a/apps/sim/blocks/blocks/twilio.ts +++ b/apps/sim/blocks/blocks/twilio.ts @@ -10,6 +10,7 @@ export const TwilioSMSBlock: BlockConfig = { authMode: AuthMode.ApiKey, longDescription: 'Integrate Twilio into the workflow. 
Can send SMS messages.', category: 'tools', + docsLink: 'https://docs.sim.ai/tools/twilio', bgColor: '#F22F46', // Twilio brand color icon: TwilioIcon, subBlocks: [ diff --git a/apps/sim/blocks/blocks/twilio_voice.ts b/apps/sim/blocks/blocks/twilio_voice.ts index cd3d068261..d879545b13 100644 --- a/apps/sim/blocks/blocks/twilio_voice.ts +++ b/apps/sim/blocks/blocks/twilio_voice.ts @@ -12,6 +12,7 @@ export const TwilioVoiceBlock: BlockConfig = { longDescription: 'Integrate Twilio Voice into the workflow. Make outbound calls and retrieve call recordings.', category: 'tools', + docsLink: 'https://docs.sim.ai/tools/twilio_voice', bgColor: '#F22F46', // Twilio brand color icon: TwilioIcon, triggerAllowed: true, From fd76e98f0e53358123641fb5c3c766da873f1ed0 Mon Sep 17 00:00:00 2001 From: Adam Gough <77861281+aadamgough@users.noreply.github.com> Date: Fri, 9 Jan 2026 18:41:51 -0800 Subject: [PATCH 11/16] improvement(wand): added more wands (#2756) * added wand configs * fixed greptile comments --- apps/sim/blocks/blocks/google_sheets.ts | 27 ++++ apps/sim/blocks/blocks/microsoft_excel.ts | 78 +++++++++++ apps/sim/blocks/blocks/sharepoint.ts | 155 +++++++++++++++++++++- apps/sim/blocks/blocks/supabase.ts | 32 +++++ apps/sim/blocks/blocks/twilio_voice.ts | 56 ++++++++ apps/sim/triggers/twilio_voice/webhook.ts | 59 ++++++++ 6 files changed, 401 insertions(+), 6 deletions(-) diff --git a/apps/sim/blocks/blocks/google_sheets.ts b/apps/sim/blocks/blocks/google_sheets.ts index f026fe42cf..c500f5f3d4 100644 --- a/apps/sim/blocks/blocks/google_sheets.ts +++ b/apps/sim/blocks/blocks/google_sheets.ts @@ -73,6 +73,33 @@ export const GoogleSheetsBlock: BlockConfig = { title: 'Range', type: 'short-input', placeholder: 'Sheet name and cell range (e.g., Sheet1!A1:D10)', + wandConfig: { + enabled: true, + prompt: `Generate a valid Google Sheets range based on the user's description. + +### VALID FORMATS +1. Sheet name only (for appending to end): Sheet1 +2. 
Full range (for reading/writing specific cells): Sheet1!A1:D10 + +### RANGE RULES +- Sheet names with spaces must be quoted: 'My Sheet'!A1:B10 +- Column letters are uppercase: A, B, C, ... Z, AA, AB, etc. +- Row numbers start at 1 (not 0) +- Range format: SheetName!StartCell:EndCell (e.g., Sheet1!A2:C10) +- For a single column: Sheet1!A:A +- For a single row: Sheet1!1:1 + +### EXAMPLES +- "the first sheet" -> Sheet1 +- "data sheet from A1 to E100" -> 'Data Sheet'!A1:E100 +- "append to orders sheet" -> Orders +- "cells A1 through C50 on Sheet2" -> Sheet2!A1:C50 +- "column A of inventory" -> Inventory!A:A +- "just the headers row" -> Sheet1!1:1 + +Return ONLY the range string - no explanations, no quotes around the entire output, no extra text.`, + placeholder: 'Describe the range (e.g., "all data from Sheet1" or "A1 to D50")...', + }, }, // Write-specific Fields { diff --git a/apps/sim/blocks/blocks/microsoft_excel.ts b/apps/sim/blocks/blocks/microsoft_excel.ts index 4d64ed4e3f..406942327d 100644 --- a/apps/sim/blocks/blocks/microsoft_excel.ts +++ b/apps/sim/blocks/blocks/microsoft_excel.ts @@ -70,6 +70,33 @@ export const MicrosoftExcelBlock: BlockConfig = { type: 'short-input', placeholder: 'Sheet name and cell range (e.g., Sheet1!A1:D10)', condition: { field: 'operation', value: ['read', 'write', 'update'] }, + wandConfig: { + enabled: true, + prompt: `Generate a valid Microsoft Excel range based on the user's description. + +### FORMAT (REQUIRED) +SheetName!StartCell:EndCell + +Excel ALWAYS requires the full range format with both sheet name and cell range. + +### RANGE RULES +- Sheet names with spaces must be quoted: 'My Sheet'!A1:B10 +- Column letters are uppercase: A, B, C, ... Z, AA, AB, etc. 
+- Row numbers start at 1 (not 0) +- For entire columns: Sheet1!A:Z +- For entire rows: Sheet1!1:100 + +### EXAMPLES +- "the first sheet" -> Sheet1!A1:Z1000 +- "data sheet from A1 to E100" -> 'Data Sheet'!A1:E100 +- "cells A1 through C50 on Sheet2" -> Sheet2!A1:C50 +- "column A of inventory" -> Inventory!A:A +- "just the headers row on Sheet1" -> Sheet1!1:1 +- "all data on sales sheet" -> 'Sales'!A1:Z1000 + +Return ONLY the range string - no explanations, no quotes around the entire output, no extra text.`, + placeholder: 'Describe the range (e.g., "A1 to D50 on Sheet1")...', + }, }, { id: 'tableName', @@ -95,6 +122,22 @@ export const MicrosoftExcelBlock: BlockConfig = { 'Enter values as JSON array of arrays (e.g., [["A1", "B1"], ["A2", "B2"]]) or an array of objects (e.g., [{"name":"John", "age":30}, {"name":"Jane", "age":25}])', condition: { field: 'operation', value: 'write' }, required: true, + wandConfig: { + enabled: true, + prompt: `Generate Microsoft Excel data as a JSON array based on the user's description. + +Format options: +1. Array of arrays: [["Header1", "Header2"], ["Value1", "Value2"]] +2. 
Array of objects: [{"column1": "value1", "column2": "value2"}] + +Examples: +- "sales data with product and revenue columns" -> [["Product", "Revenue"], ["Widget A", 1500], ["Widget B", 2300]] +- "list of employees with name and email" -> [{"name": "John Doe", "email": "john@example.com"}, {"name": "Jane Smith", "email": "jane@example.com"}] + +Return ONLY the JSON array - no explanations, no markdown, no extra text.`, + placeholder: 'Describe the data you want to write...', + generationType: 'json-object', + }, }, { id: 'valueInputOption', @@ -114,6 +157,22 @@ export const MicrosoftExcelBlock: BlockConfig = { 'Enter values as JSON array of arrays (e.g., [["A1", "B1"], ["A2", "B2"]]) or an array of objects (e.g., [{"name":"John", "age":30}, {"name":"Jane", "age":25}])', condition: { field: 'operation', value: 'update' }, required: true, + wandConfig: { + enabled: true, + prompt: `Generate Microsoft Excel data as a JSON array based on the user's description. + +Format options: +1. Array of arrays: [["Header1", "Header2"], ["Value1", "Value2"]] +2. Array of objects: [{"column1": "value1", "column2": "value2"}] + +Examples: +- "update with new prices" -> [["Product", "Price"], ["Widget A", 29.99], ["Widget B", 49.99]] +- "quarterly targets" -> [{"Q1": 10000, "Q2": 12000, "Q3": 15000, "Q4": 18000}] + +Return ONLY the JSON array - no explanations, no markdown, no extra text.`, + placeholder: 'Describe the data you want to update...', + generationType: 'json-object', + }, }, { id: 'valueInputOption', @@ -133,6 +192,25 @@ export const MicrosoftExcelBlock: BlockConfig = { 'Enter values as JSON array of arrays (e.g., [["A1", "B1"], ["A2", "B2"]]) or an array of objects (e.g., [{"name":"John", "age":30}, {"name":"Jane", "age":25}])', condition: { field: 'operation', value: 'table_add' }, required: true, + wandConfig: { + enabled: true, + prompt: `Generate Microsoft Excel table row data as a JSON array based on the user's description. + +Format options: +1. 
Array of arrays: [["Value1", "Value2"], ["Value3", "Value4"]] +2. Array of objects: [{"column1": "value1", "column2": "value2"}] + +Note: When adding to an existing table, do NOT include headers - only data rows. + +Examples: +- "add new sales record" -> [["2024-01-15", "Widget Pro", 5, 249.99]] +- "append customer info" -> [{"name": "Acme Corp", "contact": "John Smith", "status": "Active"}] +- "add multiple rows with name, age, city" -> [["Alice", 28, "NYC"], ["Bob", 35, "LA"]] + +Return ONLY the JSON array - no explanations, no markdown, no extra text.`, + placeholder: 'Describe the data you want to add to the table...', + generationType: 'json-object', + }, }, ], tools: { diff --git a/apps/sim/blocks/blocks/sharepoint.ts b/apps/sim/blocks/blocks/sharepoint.ts index 4030225653..5fe1dfb6df 100644 --- a/apps/sim/blocks/blocks/sharepoint.ts +++ b/apps/sim/blocks/blocks/sharepoint.ts @@ -132,14 +132,108 @@ export const SharepointBlock: BlockConfig = { type: 'short-input', placeholder: "Template (e.g., 'genericList')", condition: { field: 'operation', value: 'create_list' }, + wandConfig: { + enabled: true, + prompt: `Generate a SharePoint list template name based on the user's description. 
+ +### AVAILABLE TEMPLATES +- genericList - Standard list for general data (default) +- documentLibrary - For storing and managing documents +- survey - For creating surveys and polls +- links - For storing hyperlinks +- announcements - For news and announcements +- contacts - For contact information (name, email, phone) +- events - For calendar events and scheduling +- tasks - For task tracking and project management +- discussionBoard - For team discussions and forums +- pictureLibrary - For storing images and photos +- issue - For issue/bug tracking + +### EXAMPLES +- "I want to track tasks" -> tasks +- "store documents" -> documentLibrary +- "team announcements" -> announcements +- "contact list" -> contacts +- "calendar events" -> events +- "general data" -> genericList +- "bug tracking" -> issue +- "photo gallery" -> pictureLibrary + +Return ONLY the template name - no explanations, no quotes, no extra text.`, + placeholder: 'Describe what kind of list you need...', + }, }, { - id: 'pageContent', - title: 'Page Content', + id: 'columnDefinitions', + title: 'Column Definitions', type: 'long-input', - placeholder: 'Provide page content', + placeholder: 'Optional: Define custom columns as JSON array', condition: { field: 'operation', value: ['create_list'] }, + wandConfig: { + enabled: true, + prompt: `Generate a JSON array of SharePoint list column definitions based on the user's description. + +### FORMAT +A JSON array of column definition objects. Each column needs at minimum a "name" and column type properties. 
+ +### COLUMN TYPES AND PROPERTIES + +**Text Column:** +{"name": "ColumnName", "text": {}} +- For single line of text + +**Multi-line Text:** +{"name": "ColumnName", "text": {"allowMultipleLines": true}} + +**Number Column:** +{"name": "ColumnName", "number": {}} +- Optional: "minimum", "maximum", "decimalPlaces" + +**DateTime Column:** +{"name": "ColumnName", "dateTime": {"format": "dateOnly"}} +- format: "dateOnly" or "dateTime" + +**Boolean (Yes/No):** +{"name": "ColumnName", "boolean": {}} + +**Choice Column:** +{"name": "ColumnName", "choice": {"choices": ["Option1", "Option2", "Option3"]}} + +**Person Column:** +{"name": "ColumnName", "personOrGroup": {}} + +**Currency:** +{"name": "ColumnName", "currency": {"locale": "en-US"}} + +### EXAMPLES + +"add columns for status (choice: Active, Completed, On Hold), due date, and priority number" +-> [ + {"name": "Status", "choice": {"choices": ["Active", "Completed", "On Hold"]}}, + {"name": "DueDate", "dateTime": {"format": "dateOnly"}}, + {"name": "Priority", "number": {"minimum": 1, "maximum": 5}} +] + +"text column for description, yes/no for completed, date for start" +-> [ + {"name": "Description", "text": {"allowMultipleLines": true}}, + {"name": "Completed", "boolean": {}}, + {"name": "StartDate", "dateTime": {"format": "dateOnly"}} +] + +"assignee (person), budget (currency), category (choice: Marketing, Sales, Engineering)" +-> [ + {"name": "Assignee", "personOrGroup": {}}, + {"name": "Budget", "currency": {"locale": "en-US"}}, + {"name": "Category", "choice": {"choices": ["Marketing", "Sales", "Engineering"]}} +] + +Return ONLY the JSON array - no explanations, no markdown, no extra text.`, + placeholder: + 'Describe the columns you want to add (e.g., "status dropdown, due date, priority number")...', + generationType: 'json-object', + }, }, { id: 'listDescription', @@ -164,9 +258,50 @@ export const SharepointBlock: BlockConfig = { id: 'listItemFields', title: 'List Item Fields', type: 'long-input', - 
placeholder: 'Enter list item fields', + placeholder: + 'Enter list item fields as JSON (e.g., {"Title": "My Item", "Status": "Active"})', canonicalParamId: 'listItemFields', condition: { field: 'operation', value: ['update_list', 'add_list_items'] }, + wandConfig: { + enabled: true, + prompt: `Generate a JSON object for SharePoint list item fields based on the user's description. + +### FORMAT +A JSON object where keys are column internal names and values are the data to set. + +### RULES +- Use the column's internal name (often same as display name, but spaces become _x0020_) +- Common field names: Title, Status, Description, Priority, DueDate, AssignedTo, Category +- Date fields should use ISO 8601 format: "2024-01-15" or "2024-01-15T10:30:00Z" +- Number fields should be numeric, not strings +- Boolean fields use true/false +- Choice fields use the exact choice value as a string +- Person fields use the person's email or ID + +### READ-ONLY FIELDS (automatically filtered out) +Id, UniqueId, GUID, Created, Modified, Author, Editor, ContentTypeId + +### EXAMPLES + +"set title to Project Alpha and status to In Progress" +-> {"Title": "Project Alpha", "Status": "In Progress"} + +"update priority to high and due date to next Friday" +-> {"Priority": "High", "DueDate": "2024-01-19"} + +"add task with title Review Document, assigned to john@company.com" +-> {"Title": "Review Document", "AssignedToLookupId": "john@company.com"} + +"create contact with name John Smith, email john@example.com, phone 555-1234" +-> {"Title": "John Smith", "Email": "john@example.com", "WorkPhone": "555-1234"} + +"set completed to true and notes to Task finished successfully" +-> {"Completed": true, "Notes": "Task finished successfully"} + +Return ONLY the JSON object - no explanations, no markdown, no extra text.`, + placeholder: 'Describe the fields and values you want to set...', + generationType: 'json-object', + }, }, // Upload File operation fields @@ -267,6 +402,7 @@ export const 
SharepointBlock: BlockConfig = { includeItems, uploadFiles, files, + columnDefinitions, ...others } = rest as any @@ -314,7 +450,7 @@ export const SharepointBlock: BlockConfig = { // Handle file upload files parameter const fileParam = uploadFiles || files - const baseParams = { + const baseParams: Record = { credential, siteId: effectiveSiteId || undefined, pageSize: others.pageSize ? Number.parseInt(others.pageSize as string, 10) : undefined, @@ -331,6 +467,10 @@ export const SharepointBlock: BlockConfig = { baseParams.files = fileParam } + if (columnDefinitions) { + baseParams.pageContent = columnDefinitions + } + return baseParams }, }, @@ -339,7 +479,10 @@ export const SharepointBlock: BlockConfig = { operation: { type: 'string', description: 'Operation to perform' }, credential: { type: 'string', description: 'Microsoft account credential' }, pageName: { type: 'string', description: 'Page name' }, - pageContent: { type: 'string', description: 'Page content' }, + columnDefinitions: { + type: 'string', + description: 'Column definitions for list creation (JSON array)', + }, pageTitle: { type: 'string', description: 'Page title' }, pageId: { type: 'string', description: 'Page ID' }, siteSelector: { type: 'string', description: 'Site selector' }, diff --git a/apps/sim/blocks/blocks/supabase.ts b/apps/sim/blocks/blocks/supabase.ts index b6602362a4..4a60037fcc 100644 --- a/apps/sim/blocks/blocks/supabase.ts +++ b/apps/sim/blocks/blocks/supabase.ts @@ -402,6 +402,38 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e type: 'short-input', placeholder: 'column_name (add DESC for descending)', condition: { field: 'operation', value: 'query' }, + wandConfig: { + enabled: true, + prompt: `Generate a Supabase order by clause based on the user's description. 
+ +### FORMAT +column_name [ASC|DESC] + +### RULES +- Column name only: sorts ascending by default +- Add DESC after column name for descending order +- Add ASC after column name for ascending order (explicit) +- Column names are case-sensitive and should match your database schema + +### COMMON PATTERNS +- Newest first: created_at DESC +- Oldest first: created_at ASC +- Alphabetical: name +- Reverse alphabetical: name DESC +- Highest value first: price DESC +- Lowest value first: price ASC + +### EXAMPLES +- "sort by start time newest first" -> start_time DESC +- "order by name alphabetically" -> name +- "sort by created date oldest first" -> created_at ASC +- "highest scores first" -> score DESC +- "sort by updated timestamp descending" -> updated_at DESC +- "order by email" -> email + +Return ONLY the order by expression - no explanations, no extra text.`, + placeholder: 'Describe how to sort (e.g., "newest first by created_at")...', + }, }, // Optional limit for query operation { diff --git a/apps/sim/blocks/blocks/twilio_voice.ts b/apps/sim/blocks/blocks/twilio_voice.ts index d879545b13..b9692c1089 100644 --- a/apps/sim/blocks/blocks/twilio_voice.ts +++ b/apps/sim/blocks/blocks/twilio_voice.ts @@ -87,6 +87,62 @@ export const TwilioVoiceBlock: BlockConfig = { field: 'operation', value: 'make_call', }, + wandConfig: { + enabled: true, + prompt: `Generate TwiML (Twilio Markup Language) for outbound voice calls based on the user's description. + +### IMPORTANT: Use SQUARE BRACKETS instead of angle brackets +- Use [Tag] instead of +- Use [/Tag] instead of +- Use [Tag/] for self-closing tags instead of + +### COMMON TWIML VERBS + +**[Say]** - Text-to-speech +[Say voice="alice"]Hello, this is an automated call.[/Say] +- Voices: alice, man, woman, Polly.Joanna, Polly.Matthew, etc. 
+ +**[Play]** - Play audio file +[Play]https://example.com/audio.mp3[/Play] + +**[Record]** - Record caller's voice +[Record maxLength="120" transcribe="true"/] +- transcribe="true" to get text transcription + +**[Gather]** - Collect keypad input or speech +[Gather input="dtmf speech" timeout="5" numDigits="1"] + [Say]Press 1 to confirm, 2 to cancel.[/Say] +[/Gather] + +**[Dial]** - Connect to another number +[Dial]+14155551234[/Dial] + +**[Pause]** - Add silence +[Pause length="2"/] + +**[Hangup]** - End the call +[Hangup/] + +### EXAMPLES + +"say hello and deliver a reminder message" +-> [Response][Say voice="alice"]Hello! This is a reminder about your appointment tomorrow at 2 PM. Press 1 to confirm or 2 to reschedule.[/Say][Gather input="dtmf" timeout="10" numDigits="1"/][/Response] + +"play a recorded message" +-> [Response][Play]https://example.com/message.mp3[/Play][/Response] + +"say a message and record their response" +-> [Response][Say voice="alice"]Hello! Please leave your feedback after the beep.[/Say][Record maxLength="60" transcribe="true"/][Say voice="alice"]Thank you for your feedback. Goodbye.[/Say][/Response] + +"simple greeting message" +-> [Response][Say voice="alice"]Hello! This is an automated call from your service provider. Have a great day![/Say][/Response] + +"ask a yes or no question" +-> [Response][Say voice="alice"]Hello! Would you like to receive updates? Press 1 for yes, or 2 for no.[/Say][Gather input="dtmf" timeout="10" numDigits="1"/][Say voice="alice"]We didn't receive your response. 
Goodbye.[/Say][/Response] + +Return ONLY the TwiML with square brackets - no explanations, no markdown, no extra text.`, + placeholder: 'Describe what the call should say or do...', + }, }, { id: 'record', diff --git a/apps/sim/triggers/twilio_voice/webhook.ts b/apps/sim/triggers/twilio_voice/webhook.ts index d3fb1c62b2..c87194b77f 100644 --- a/apps/sim/triggers/twilio_voice/webhook.ts +++ b/apps/sim/triggers/twilio_voice/webhook.ts @@ -48,6 +48,65 @@ export const twilioVoiceWebhookTrigger: TriggerConfig = { 'TwiML instructions to return immediately to Twilio. Use square brackets instead of angle brackets (e.g., [Response] instead of ). This controls what happens when the call comes in (e.g., play a message, record, gather input). Your workflow will execute in the background.', required: false, mode: 'trigger', + wandConfig: { + enabled: true, + prompt: `Generate TwiML (Twilio Markup Language) for voice calls based on the user's description. + +### IMPORTANT: Use SQUARE BRACKETS instead of angle brackets +- Use [Tag] instead of +- Use [/Tag] instead of +- Use [Tag/] for self-closing tags instead of + +### COMMON TWIML VERBS + +**[Say]** - Text-to-speech +[Say voice="alice"]Hello, how can I help you?[/Say] +- Voices: alice, man, woman, Polly.Joanna, Polly.Matthew, etc. 
+ +**[Play]** - Play audio file +[Play]https://example.com/audio.mp3[/Play] + +**[Record]** - Record caller's voice +[Record maxLength="120" transcribe="true"/] +- transcribe="true" to get text transcription + +**[Gather]** - Collect keypad input or speech +[Gather input="dtmf speech" timeout="5" numDigits="1"] + [Say]Press 1 for sales, 2 for support.[/Say] +[/Gather] + +**[Dial]** - Connect to another number +[Dial]+14155551234[/Dial] + +**[Pause]** - Add silence +[Pause length="2"/] + +**[Hangup]** - End the call +[Hangup/] + +**[Redirect]** - Redirect to another URL +[Redirect]https://example.com/next[/Redirect] + +### EXAMPLES + +"say hello and ask them to leave a message" +-> [Response][Say voice="alice"]Hello! Please leave a message after the beep.[/Say][Record maxLength="120" transcribe="true"/][/Response] + +"greet and offer options: press 1 for sales, 2 for support" +-> [Response][Gather input="dtmf" timeout="5" numDigits="1"][Say voice="alice"]Welcome! Press 1 for sales, or press 2 for support.[/Say][/Gather][Say]Sorry, I didn't get that. Goodbye.[/Say][/Response] + +"play hold music" +-> [Response][Say voice="alice"]Please hold while we connect you.[/Say][Play loop="0"]https://api.twilio.com/cowbell.mp3[/Play][/Response] + +"just say please wait" +-> [Response][Say voice="alice"]Please wait while we process your request.[/Say][/Response] + +"record a voicemail with transcription" +-> [Response][Say voice="alice"]You've reached our voicemail. Please leave a message.[/Say][Record transcribe="true" maxLength="180" playBeep="true"/][Say voice="alice"]Thank you for your message. 
Goodbye.[/Say][/Response] + +Return ONLY the TwiML with square brackets - no explanations, no markdown, no extra text.`, + placeholder: 'Describe what should happen when a call comes in...', + }, }, { id: 'triggerSave', From 47eb0603118a01b843cc86275478468632cdcb3b Mon Sep 17 00:00:00 2001 From: Vikhyath Mondreti Date: Fri, 9 Jan 2026 20:16:22 -0800 Subject: [PATCH 12/16] feat(enterprise): permission groups, access control (#2736) * feat(permission-groups): integration/model access controls for enterprise * feat: enterprise gating for BYOK, SSO, credential sets with org admin/owner checks * execution time enforcement of mcp and custom tools * add admin routes to cleanup permission group data * fix not being on enterprise checks * separate out orgs from billing system * update the docs * add custom tool blockers based on perm configs * add migrations * fix * address greptile comments * regen migrations * fix default model picking based on user config * cleaned up UI --- .../docs/content/docs/en/enterprise/index.mdx | 57 +- .../[id]/members/bulk/route.ts | 166 + .../permission-groups/[id]/members/route.ts | 229 + .../app/api/permission-groups/[id]/route.ts | 212 + apps/sim/app/api/permission-groups/route.ts | 185 + .../app/api/permission-groups/user/route.ts | 72 + .../app/api/v1/admin/access-control/route.ts | 169 + apps/sim/app/api/v1/admin/index.ts | 5 + .../admin/organizations/[id]/billing/route.ts | 41 +- .../[id]/members/[memberId]/route.ts | 3 +- .../admin/organizations/[id]/members/route.ts | 5 +- .../app/api/v1/admin/organizations/route.ts | 111 +- .../[workspaceId]/knowledge/page.tsx | 34 +- .../components/log-details/log-details.tsx | 18 +- .../[workspaceId]/templates/page.tsx | 7 + .../user-input/hooks/use-mention-data.ts | 26 +- .../components/combobox/combobox.tsx | 76 +- .../components/tool-input/tool-input.tsx | 85 +- .../panel/components/toolbar/toolbar.tsx | 12 +- .../w/[workflowId]/components/panel/panel.tsx | 54 +- 
.../components/search-modal/search-modal.tsx | 19 +- .../access-control/access-control.tsx | 1145 ++ .../components/api-keys/api-keys.tsx | 2 +- .../settings-modal/components/byok/byok.tsx | 4 +- .../components/copilot/copilot.tsx | 2 +- .../credential-sets/credential-sets.tsx | 4 +- .../components/custom-tools/custom-tools.tsx | 4 +- .../settings-modal/components/index.ts | 1 + .../components/integrations/integrations.tsx | 12 +- .../formatted-input/formatted-input.tsx | 66 - .../mcp/components/header-row/header-row.tsx | 81 - .../components/mcp/components/index.ts | 10 - .../server-list-item/server-list-item.tsx | 76 - .../components/mcp/components/types.ts | 35 - .../settings-modal/components/mcp/mcp.tsx | 273 +- .../workflow-mcp-servers.tsx | 8 +- .../settings-modal/settings-modal.tsx | 79 +- .../w/components/sidebar/sidebar.tsx | 71 +- apps/sim/executor/execution/block-executor.ts | 8 +- .../handlers/agent/agent-handler.test.ts | 1 + .../executor/handlers/agent/agent-handler.ts | 30 + .../handlers/evaluator/evaluator-handler.ts | 4 + .../handlers/router/router-handler.ts | 5 + apps/sim/executor/types.ts | 4 + apps/sim/executor/utils/permission-check.ts | 186 + apps/sim/hooks/queries/permission-groups.ts | 282 + apps/sim/hooks/use-permission-config.ts | 71 + apps/sim/lib/auth/auth.ts | 8 + apps/sim/lib/billing/core/subscription.ts | 49 + apps/sim/lib/billing/index.ts | 2 + .../lib/billing/validation/seat-management.ts | 16 +- apps/sim/lib/copilot/process-contents.ts | 22 +- .../client/workflow/manage-custom-tool.ts | 22 +- .../tools/client/workflow/manage-mcp-tool.ts | 22 +- .../tools/server/blocks/get-block-config.ts | 16 +- .../tools/server/blocks/get-block-options.ts | 13 +- .../server/blocks/get-blocks-and-tools.ts | 12 +- .../server/blocks/get-blocks-metadata-tool.ts | 18 +- .../tools/server/blocks/get-trigger-blocks.ts | 7 +- .../tools/server/workflow/edit-workflow.ts | 157 +- apps/sim/lib/core/config/env.ts | 10 + 
apps/sim/lib/core/config/feature-flags.ts | 20 +- apps/sim/lib/permission-groups/types.ts | 51 + packages/db/migrations/0137_yellow_korath.sql | 29 + .../db/migrations/meta/0137_snapshot.json | 9571 +++++++++++++++++ packages/db/migrations/meta/_journal.json | 7 + packages/db/schema.ts | 45 + 67 files changed, 13669 insertions(+), 478 deletions(-) create mode 100644 apps/sim/app/api/permission-groups/[id]/members/bulk/route.ts create mode 100644 apps/sim/app/api/permission-groups/[id]/members/route.ts create mode 100644 apps/sim/app/api/permission-groups/[id]/route.ts create mode 100644 apps/sim/app/api/permission-groups/route.ts create mode 100644 apps/sim/app/api/permission-groups/user/route.ts create mode 100644 apps/sim/app/api/v1/admin/access-control/route.ts create mode 100644 apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/access-control/access-control.tsx delete mode 100644 apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/mcp/components/formatted-input/formatted-input.tsx delete mode 100644 apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/mcp/components/header-row/header-row.tsx delete mode 100644 apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/mcp/components/server-list-item/server-list-item.tsx delete mode 100644 apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/mcp/components/types.ts create mode 100644 apps/sim/executor/utils/permission-check.ts create mode 100644 apps/sim/hooks/queries/permission-groups.ts create mode 100644 apps/sim/hooks/use-permission-config.ts create mode 100644 apps/sim/lib/permission-groups/types.ts create mode 100644 packages/db/migrations/0137_yellow_korath.sql create mode 100644 packages/db/migrations/meta/0137_snapshot.json diff --git a/apps/docs/content/docs/en/enterprise/index.mdx 
b/apps/docs/content/docs/en/enterprise/index.mdx index c5b451d83d..3e5acdf5e2 100644 --- a/apps/docs/content/docs/en/enterprise/index.mdx +++ b/apps/docs/content/docs/en/enterprise/index.mdx @@ -1,6 +1,6 @@ --- title: Enterprise -description: Enterprise features for organizations with advanced security and compliance requirements +description: Enterprise features for business organizations --- import { Callout } from 'fumadocs-ui/components/callout' @@ -9,6 +9,28 @@ Sim Studio Enterprise provides advanced features for organizations with enhanced --- +## Access Control + +Define permission groups to control what features and integrations team members can use. + +### Features + +- **Allowed Model Providers** - Restrict which AI providers users can access (OpenAI, Anthropic, Google, etc.) +- **Allowed Blocks** - Control which workflow blocks are available +- **Platform Settings** - Hide Knowledge Base, disable MCP tools, or disable custom tools + +### Setup + +1. Navigate to **Settings** → **Access Control** in your workspace +2. Create a permission group with your desired restrictions +3. Add team members to the permission group + + + Users not assigned to any permission group have full access. Permission restrictions are enforced at both UI and execution time. + + +--- + ## Bring Your Own Key (BYOK) Use your own API keys for AI model providers instead of Sim Studio's hosted keys. @@ -61,15 +83,38 @@ Enterprise authentication with SAML 2.0 and OIDC support for centralized identit --- -## Self-Hosted +## Self-Hosted Configuration + +For self-hosted deployments, enterprise features can be enabled via environment variables without requiring billing. 
-For self-hosted deployments, enterprise features can be enabled via environment variables: +### Environment Variables | Variable | Description | |----------|-------------| +| `ORGANIZATIONS_ENABLED`, `NEXT_PUBLIC_ORGANIZATIONS_ENABLED` | Enable team/organization management | +| `ACCESS_CONTROL_ENABLED`, `NEXT_PUBLIC_ACCESS_CONTROL_ENABLED` | Permission groups for access restrictions | | `SSO_ENABLED`, `NEXT_PUBLIC_SSO_ENABLED` | Single Sign-On with SAML/OIDC | | `CREDENTIAL_SETS_ENABLED`, `NEXT_PUBLIC_CREDENTIAL_SETS_ENABLED` | Polling Groups for email triggers | - - BYOK is only available on hosted Sim Studio. Self-hosted deployments configure AI provider keys directly via environment variables. - +### Organization Management + +When billing is disabled, use the Admin API to manage organizations: + +```bash +# Create an organization +curl -X POST https://your-instance/api/v1/admin/organizations \ + -H "x-admin-key: YOUR_ADMIN_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{"name": "My Organization", "ownerId": "user-id-here"}' + +# Add a member +curl -X POST https://your-instance/api/v1/admin/organizations/{orgId}/members \ + -H "x-admin-key: YOUR_ADMIN_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{"userId": "user-id-here", "role": "admin"}' +``` + +### Notes + +- Enabling `ACCESS_CONTROL_ENABLED` automatically enables organizations, as access control requires organization membership. +- BYOK is only available on hosted Sim Studio. Self-hosted deployments configure AI provider keys directly via environment variables. 
diff --git a/apps/sim/app/api/permission-groups/[id]/members/bulk/route.ts b/apps/sim/app/api/permission-groups/[id]/members/bulk/route.ts new file mode 100644 index 0000000000..c6e3faa2d2 --- /dev/null +++ b/apps/sim/app/api/permission-groups/[id]/members/bulk/route.ts @@ -0,0 +1,166 @@ +import { db } from '@sim/db' +import { member, permissionGroup, permissionGroupMember } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, eq, inArray } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { getSession } from '@/lib/auth' +import { hasAccessControlAccess } from '@/lib/billing' + +const logger = createLogger('PermissionGroupBulkMembers') + +async function getPermissionGroupWithAccess(groupId: string, userId: string) { + const [group] = await db + .select({ + id: permissionGroup.id, + organizationId: permissionGroup.organizationId, + }) + .from(permissionGroup) + .where(eq(permissionGroup.id, groupId)) + .limit(1) + + if (!group) return null + + const [membership] = await db + .select({ role: member.role }) + .from(member) + .where(and(eq(member.userId, userId), eq(member.organizationId, group.organizationId))) + .limit(1) + + if (!membership) return null + + return { group, role: membership.role } +} + +const bulkAddSchema = z.object({ + userIds: z.array(z.string()).optional(), + addAllOrgMembers: z.boolean().optional(), +}) + +export async function POST(req: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const session = await getSession() + + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const { id } = await params + + try { + const hasAccess = await hasAccessControlAccess(session.user.id) + if (!hasAccess) { + return NextResponse.json( + { error: 'Access Control is an Enterprise feature' }, + { status: 403 } + ) + } + + const result = await getPermissionGroupWithAccess(id, session.user.id) + + if 
(!result) { + return NextResponse.json({ error: 'Permission group not found' }, { status: 404 }) + } + + if (result.role !== 'admin' && result.role !== 'owner') { + return NextResponse.json({ error: 'Admin or owner permissions required' }, { status: 403 }) + } + + const body = await req.json() + const { userIds, addAllOrgMembers } = bulkAddSchema.parse(body) + + let targetUserIds: string[] = [] + + if (addAllOrgMembers) { + const orgMembers = await db + .select({ userId: member.userId }) + .from(member) + .where(eq(member.organizationId, result.group.organizationId)) + + targetUserIds = orgMembers.map((m) => m.userId) + } else if (userIds && userIds.length > 0) { + const validMembers = await db + .select({ userId: member.userId }) + .from(member) + .where( + and( + eq(member.organizationId, result.group.organizationId), + inArray(member.userId, userIds) + ) + ) + + targetUserIds = validMembers.map((m) => m.userId) + } + + if (targetUserIds.length === 0) { + return NextResponse.json({ added: 0, moved: 0 }) + } + + const existingMemberships = await db + .select({ + id: permissionGroupMember.id, + userId: permissionGroupMember.userId, + permissionGroupId: permissionGroupMember.permissionGroupId, + }) + .from(permissionGroupMember) + .where(inArray(permissionGroupMember.userId, targetUserIds)) + + const alreadyInThisGroup = new Set( + existingMemberships.filter((m) => m.permissionGroupId === id).map((m) => m.userId) + ) + const usersToAdd = targetUserIds.filter((uid) => !alreadyInThisGroup.has(uid)) + + if (usersToAdd.length === 0) { + return NextResponse.json({ added: 0, moved: 0 }) + } + + const membershipsToDelete = existingMemberships.filter( + (m) => m.permissionGroupId !== id && usersToAdd.includes(m.userId) + ) + const movedCount = membershipsToDelete.length + + await db.transaction(async (tx) => { + if (membershipsToDelete.length > 0) { + await tx.delete(permissionGroupMember).where( + inArray( + permissionGroupMember.id, + membershipsToDelete.map((m) => m.id) 
+ ) + ) + } + + const newMembers = usersToAdd.map((userId) => ({ + id: crypto.randomUUID(), + permissionGroupId: id, + userId, + assignedBy: session.user.id, + assignedAt: new Date(), + })) + + await tx.insert(permissionGroupMember).values(newMembers) + }) + + logger.info('Bulk added members to permission group', { + permissionGroupId: id, + addedCount: usersToAdd.length, + movedCount, + assignedBy: session.user.id, + }) + + return NextResponse.json({ added: usersToAdd.length, moved: movedCount }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json({ error: error.errors[0].message }, { status: 400 }) + } + if ( + error instanceof Error && + error.message.includes('permission_group_member_user_id_unique') + ) { + return NextResponse.json( + { error: 'One or more users are already in a permission group' }, + { status: 409 } + ) + } + logger.error('Error bulk adding members to permission group', error) + return NextResponse.json({ error: 'Failed to add members' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/permission-groups/[id]/members/route.ts b/apps/sim/app/api/permission-groups/[id]/members/route.ts new file mode 100644 index 0000000000..4979da755e --- /dev/null +++ b/apps/sim/app/api/permission-groups/[id]/members/route.ts @@ -0,0 +1,229 @@ +import { db } from '@sim/db' +import { member, permissionGroup, permissionGroupMember, user } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, eq } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { getSession } from '@/lib/auth' +import { hasAccessControlAccess } from '@/lib/billing' + +const logger = createLogger('PermissionGroupMembers') + +async function getPermissionGroupWithAccess(groupId: string, userId: string) { + const [group] = await db + .select({ + id: permissionGroup.id, + organizationId: permissionGroup.organizationId, + }) + .from(permissionGroup) + 
.where(eq(permissionGroup.id, groupId)) + .limit(1) + + if (!group) return null + + const [membership] = await db + .select({ role: member.role }) + .from(member) + .where(and(eq(member.userId, userId), eq(member.organizationId, group.organizationId))) + .limit(1) + + if (!membership) return null + + return { group, role: membership.role } +} + +export async function GET(req: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const session = await getSession() + + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const { id } = await params + const result = await getPermissionGroupWithAccess(id, session.user.id) + + if (!result) { + return NextResponse.json({ error: 'Permission group not found' }, { status: 404 }) + } + + const members = await db + .select({ + id: permissionGroupMember.id, + userId: permissionGroupMember.userId, + assignedAt: permissionGroupMember.assignedAt, + userName: user.name, + userEmail: user.email, + userImage: user.image, + }) + .from(permissionGroupMember) + .leftJoin(user, eq(permissionGroupMember.userId, user.id)) + .where(eq(permissionGroupMember.permissionGroupId, id)) + + return NextResponse.json({ members }) +} + +const addMemberSchema = z.object({ + userId: z.string().min(1), +}) + +export async function POST(req: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const session = await getSession() + + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const { id } = await params + + try { + const hasAccess = await hasAccessControlAccess(session.user.id) + if (!hasAccess) { + return NextResponse.json( + { error: 'Access Control is an Enterprise feature' }, + { status: 403 } + ) + } + + const result = await getPermissionGroupWithAccess(id, session.user.id) + + if (!result) { + return NextResponse.json({ error: 'Permission group not found' }, { status: 404 }) + } + + if (result.role !== 'admin' && 
result.role !== 'owner') { + return NextResponse.json({ error: 'Admin or owner permissions required' }, { status: 403 }) + } + + const body = await req.json() + const { userId } = addMemberSchema.parse(body) + + const [orgMember] = await db + .select({ id: member.id }) + .from(member) + .where(and(eq(member.userId, userId), eq(member.organizationId, result.group.organizationId))) + .limit(1) + + if (!orgMember) { + return NextResponse.json( + { error: 'User is not a member of this organization' }, + { status: 400 } + ) + } + + const [existingMembership] = await db + .select({ + id: permissionGroupMember.id, + permissionGroupId: permissionGroupMember.permissionGroupId, + }) + .from(permissionGroupMember) + .where(eq(permissionGroupMember.userId, userId)) + .limit(1) + + if (existingMembership?.permissionGroupId === id) { + return NextResponse.json( + { error: 'User is already in this permission group' }, + { status: 409 } + ) + } + + const newMember = await db.transaction(async (tx) => { + if (existingMembership) { + await tx + .delete(permissionGroupMember) + .where(eq(permissionGroupMember.id, existingMembership.id)) + } + + const memberData = { + id: crypto.randomUUID(), + permissionGroupId: id, + userId, + assignedBy: session.user.id, + assignedAt: new Date(), + } + + await tx.insert(permissionGroupMember).values(memberData) + return memberData + }) + + logger.info('Added member to permission group', { + permissionGroupId: id, + userId, + assignedBy: session.user.id, + }) + + return NextResponse.json({ member: newMember }, { status: 201 }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json({ error: error.errors[0].message }, { status: 400 }) + } + if ( + error instanceof Error && + error.message.includes('permission_group_member_user_id_unique') + ) { + return NextResponse.json({ error: 'User is already in a permission group' }, { status: 409 }) + } + logger.error('Error adding member to permission group', error) + return 
NextResponse.json({ error: 'Failed to add member' }, { status: 500 }) + } +} + +export async function DELETE(req: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const session = await getSession() + + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const { id } = await params + const { searchParams } = new URL(req.url) + const memberId = searchParams.get('memberId') + + if (!memberId) { + return NextResponse.json({ error: 'memberId is required' }, { status: 400 }) + } + + try { + const hasAccess = await hasAccessControlAccess(session.user.id) + if (!hasAccess) { + return NextResponse.json( + { error: 'Access Control is an Enterprise feature' }, + { status: 403 } + ) + } + + const result = await getPermissionGroupWithAccess(id, session.user.id) + + if (!result) { + return NextResponse.json({ error: 'Permission group not found' }, { status: 404 }) + } + + if (result.role !== 'admin' && result.role !== 'owner') { + return NextResponse.json({ error: 'Admin or owner permissions required' }, { status: 403 }) + } + + const [memberToRemove] = await db + .select() + .from(permissionGroupMember) + .where( + and(eq(permissionGroupMember.id, memberId), eq(permissionGroupMember.permissionGroupId, id)) + ) + .limit(1) + + if (!memberToRemove) { + return NextResponse.json({ error: 'Member not found' }, { status: 404 }) + } + + await db.delete(permissionGroupMember).where(eq(permissionGroupMember.id, memberId)) + + logger.info('Removed member from permission group', { + permissionGroupId: id, + memberId, + userId: session.user.id, + }) + + return NextResponse.json({ success: true }) + } catch (error) { + logger.error('Error removing member from permission group', error) + return NextResponse.json({ error: 'Failed to remove member' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/permission-groups/[id]/route.ts b/apps/sim/app/api/permission-groups/[id]/route.ts new file mode 100644 index 
0000000000..5e1486ff26 --- /dev/null +++ b/apps/sim/app/api/permission-groups/[id]/route.ts @@ -0,0 +1,212 @@ +import { db } from '@sim/db' +import { member, permissionGroup, permissionGroupMember } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, eq } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { getSession } from '@/lib/auth' +import { hasAccessControlAccess } from '@/lib/billing' +import { + type PermissionGroupConfig, + parsePermissionGroupConfig, +} from '@/lib/permission-groups/types' + +const logger = createLogger('PermissionGroup') + +const configSchema = z.object({ + allowedIntegrations: z.array(z.string()).nullable().optional(), + allowedModelProviders: z.array(z.string()).nullable().optional(), + hideTraceSpans: z.boolean().optional(), + hideKnowledgeBaseTab: z.boolean().optional(), + hideCopilot: z.boolean().optional(), + hideApiKeysTab: z.boolean().optional(), + hideEnvironmentTab: z.boolean().optional(), + hideFilesTab: z.boolean().optional(), + disableMcpTools: z.boolean().optional(), + disableCustomTools: z.boolean().optional(), + hideTemplates: z.boolean().optional(), +}) + +const updateSchema = z.object({ + name: z.string().trim().min(1).max(100).optional(), + description: z.string().max(500).nullable().optional(), + config: configSchema.optional(), +}) + +async function getPermissionGroupWithAccess(groupId: string, userId: string) { + const [group] = await db + .select({ + id: permissionGroup.id, + organizationId: permissionGroup.organizationId, + name: permissionGroup.name, + description: permissionGroup.description, + config: permissionGroup.config, + createdBy: permissionGroup.createdBy, + createdAt: permissionGroup.createdAt, + updatedAt: permissionGroup.updatedAt, + }) + .from(permissionGroup) + .where(eq(permissionGroup.id, groupId)) + .limit(1) + + if (!group) return null + + const [membership] = await db + .select({ role: member.role }) + 
.from(member) + .where(and(eq(member.userId, userId), eq(member.organizationId, group.organizationId))) + .limit(1) + + if (!membership) return null + + return { group, role: membership.role } +} + +export async function GET(req: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const session = await getSession() + + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const { id } = await params + const result = await getPermissionGroupWithAccess(id, session.user.id) + + if (!result) { + return NextResponse.json({ error: 'Permission group not found' }, { status: 404 }) + } + + return NextResponse.json({ + permissionGroup: { + ...result.group, + config: parsePermissionGroupConfig(result.group.config), + }, + }) +} + +export async function PUT(req: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const session = await getSession() + + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const { id } = await params + + try { + const hasAccess = await hasAccessControlAccess(session.user.id) + if (!hasAccess) { + return NextResponse.json( + { error: 'Access Control is an Enterprise feature' }, + { status: 403 } + ) + } + + const result = await getPermissionGroupWithAccess(id, session.user.id) + + if (!result) { + return NextResponse.json({ error: 'Permission group not found' }, { status: 404 }) + } + + if (result.role !== 'admin' && result.role !== 'owner') { + return NextResponse.json({ error: 'Admin or owner permissions required' }, { status: 403 }) + } + + const body = await req.json() + const updates = updateSchema.parse(body) + + if (updates.name) { + const existingGroup = await db + .select({ id: permissionGroup.id }) + .from(permissionGroup) + .where( + and( + eq(permissionGroup.organizationId, result.group.organizationId), + eq(permissionGroup.name, updates.name) + ) + ) + .limit(1) + + if (existingGroup.length > 0 && 
existingGroup[0].id !== id) { + return NextResponse.json( + { error: 'A permission group with this name already exists' }, + { status: 409 } + ) + } + } + + const currentConfig = parsePermissionGroupConfig(result.group.config) + const newConfig: PermissionGroupConfig = updates.config + ? { ...currentConfig, ...updates.config } + : currentConfig + + await db + .update(permissionGroup) + .set({ + ...(updates.name !== undefined && { name: updates.name }), + ...(updates.description !== undefined && { description: updates.description }), + config: newConfig, + updatedAt: new Date(), + }) + .where(eq(permissionGroup.id, id)) + + const [updated] = await db + .select() + .from(permissionGroup) + .where(eq(permissionGroup.id, id)) + .limit(1) + + return NextResponse.json({ + permissionGroup: { + ...updated, + config: parsePermissionGroupConfig(updated.config), + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json({ error: error.errors[0].message }, { status: 400 }) + } + logger.error('Error updating permission group', error) + return NextResponse.json({ error: 'Failed to update permission group' }, { status: 500 }) + } +} + +export async function DELETE(req: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const session = await getSession() + + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const { id } = await params + + try { + const hasAccess = await hasAccessControlAccess(session.user.id) + if (!hasAccess) { + return NextResponse.json( + { error: 'Access Control is an Enterprise feature' }, + { status: 403 } + ) + } + + const result = await getPermissionGroupWithAccess(id, session.user.id) + + if (!result) { + return NextResponse.json({ error: 'Permission group not found' }, { status: 404 }) + } + + if (result.role !== 'admin' && result.role !== 'owner') { + return NextResponse.json({ error: 'Admin or owner permissions required' }, { status: 403 }) + } + + 
await db.delete(permissionGroupMember).where(eq(permissionGroupMember.permissionGroupId, id)) + await db.delete(permissionGroup).where(eq(permissionGroup.id, id)) + + logger.info('Deleted permission group', { permissionGroupId: id, userId: session.user.id }) + + return NextResponse.json({ success: true }) + } catch (error) { + logger.error('Error deleting permission group', error) + return NextResponse.json({ error: 'Failed to delete permission group' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/permission-groups/route.ts b/apps/sim/app/api/permission-groups/route.ts new file mode 100644 index 0000000000..a3c3a7512b --- /dev/null +++ b/apps/sim/app/api/permission-groups/route.ts @@ -0,0 +1,185 @@ +import { db } from '@sim/db' +import { member, organization, permissionGroup, permissionGroupMember, user } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, count, desc, eq } from 'drizzle-orm' +import { NextResponse } from 'next/server' +import { z } from 'zod' +import { getSession } from '@/lib/auth' +import { hasAccessControlAccess } from '@/lib/billing' +import { + DEFAULT_PERMISSION_GROUP_CONFIG, + type PermissionGroupConfig, + parsePermissionGroupConfig, +} from '@/lib/permission-groups/types' + +const logger = createLogger('PermissionGroups') + +const configSchema = z.object({ + allowedIntegrations: z.array(z.string()).nullable().optional(), + allowedModelProviders: z.array(z.string()).nullable().optional(), + hideTraceSpans: z.boolean().optional(), + hideKnowledgeBaseTab: z.boolean().optional(), + hideCopilot: z.boolean().optional(), + hideApiKeysTab: z.boolean().optional(), + hideEnvironmentTab: z.boolean().optional(), + hideFilesTab: z.boolean().optional(), + disableMcpTools: z.boolean().optional(), + disableCustomTools: z.boolean().optional(), + hideTemplates: z.boolean().optional(), +}) + +const createSchema = z.object({ + organizationId: z.string().min(1), + name: z.string().trim().min(1).max(100), + description: 
z.string().max(500).optional(), + config: configSchema.optional(), +}) + +export async function GET(req: Request) { + const session = await getSession() + + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const { searchParams } = new URL(req.url) + const organizationId = searchParams.get('organizationId') + + if (!organizationId) { + return NextResponse.json({ error: 'organizationId is required' }, { status: 400 }) + } + + const membership = await db + .select({ id: member.id, role: member.role }) + .from(member) + .where(and(eq(member.userId, session.user.id), eq(member.organizationId, organizationId))) + .limit(1) + + if (membership.length === 0) { + return NextResponse.json({ error: 'Forbidden' }, { status: 403 }) + } + + const groups = await db + .select({ + id: permissionGroup.id, + name: permissionGroup.name, + description: permissionGroup.description, + config: permissionGroup.config, + createdBy: permissionGroup.createdBy, + createdAt: permissionGroup.createdAt, + updatedAt: permissionGroup.updatedAt, + creatorName: user.name, + creatorEmail: user.email, + }) + .from(permissionGroup) + .leftJoin(user, eq(permissionGroup.createdBy, user.id)) + .where(eq(permissionGroup.organizationId, organizationId)) + .orderBy(desc(permissionGroup.createdAt)) + + const groupsWithCounts = await Promise.all( + groups.map(async (group) => { + const [memberCount] = await db + .select({ count: count() }) + .from(permissionGroupMember) + .where(eq(permissionGroupMember.permissionGroupId, group.id)) + + return { + ...group, + config: parsePermissionGroupConfig(group.config), + memberCount: memberCount?.count ?? 
0, + } + }) + ) + + return NextResponse.json({ permissionGroups: groupsWithCounts }) +} + +export async function POST(req: Request) { + const session = await getSession() + + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + try { + const hasAccess = await hasAccessControlAccess(session.user.id) + if (!hasAccess) { + return NextResponse.json( + { error: 'Access Control is an Enterprise feature' }, + { status: 403 } + ) + } + + const body = await req.json() + const { organizationId, name, description, config } = createSchema.parse(body) + + const membership = await db + .select({ id: member.id, role: member.role }) + .from(member) + .where(and(eq(member.userId, session.user.id), eq(member.organizationId, organizationId))) + .limit(1) + + const role = membership[0]?.role + if (membership.length === 0 || (role !== 'admin' && role !== 'owner')) { + return NextResponse.json({ error: 'Admin or owner permissions required' }, { status: 403 }) + } + + const orgExists = await db + .select({ id: organization.id }) + .from(organization) + .where(eq(organization.id, organizationId)) + .limit(1) + + if (orgExists.length === 0) { + return NextResponse.json({ error: 'Organization not found' }, { status: 404 }) + } + + const existingGroup = await db + .select({ id: permissionGroup.id }) + .from(permissionGroup) + .where( + and(eq(permissionGroup.organizationId, organizationId), eq(permissionGroup.name, name)) + ) + .limit(1) + + if (existingGroup.length > 0) { + return NextResponse.json( + { error: 'A permission group with this name already exists' }, + { status: 409 } + ) + } + + const groupConfig: PermissionGroupConfig = { + ...DEFAULT_PERMISSION_GROUP_CONFIG, + ...config, + } + + const now = new Date() + const newGroup = { + id: crypto.randomUUID(), + organizationId, + name, + description: description || null, + config: groupConfig, + createdBy: session.user.id, + createdAt: now, + updatedAt: now, + } + + await 
db.insert(permissionGroup).values(newGroup) + + logger.info('Created permission group', { + permissionGroupId: newGroup.id, + organizationId, + userId: session.user.id, + }) + + return NextResponse.json({ permissionGroup: newGroup }, { status: 201 }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json({ error: error.errors[0].message }, { status: 400 }) + } + logger.error('Error creating permission group', error) + return NextResponse.json({ error: 'Failed to create permission group' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/permission-groups/user/route.ts b/apps/sim/app/api/permission-groups/user/route.ts new file mode 100644 index 0000000000..e41c826533 --- /dev/null +++ b/apps/sim/app/api/permission-groups/user/route.ts @@ -0,0 +1,72 @@ +import { db } from '@sim/db' +import { member, permissionGroup, permissionGroupMember } from '@sim/db/schema' +import { and, eq } from 'drizzle-orm' +import { NextResponse } from 'next/server' +import { getSession } from '@/lib/auth' +import { isOrganizationOnEnterprisePlan } from '@/lib/billing' +import { parsePermissionGroupConfig } from '@/lib/permission-groups/types' + +export async function GET(req: Request) { + const session = await getSession() + + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const { searchParams } = new URL(req.url) + const organizationId = searchParams.get('organizationId') + + if (!organizationId) { + return NextResponse.json({ error: 'organizationId is required' }, { status: 400 }) + } + + const [membership] = await db + .select({ id: member.id }) + .from(member) + .where(and(eq(member.userId, session.user.id), eq(member.organizationId, organizationId))) + .limit(1) + + if (!membership) { + return NextResponse.json({ error: 'Not a member of this organization' }, { status: 403 }) + } + + // Short-circuit: if org is not on enterprise plan, ignore permission configs + const isEnterprise = await 
isOrganizationOnEnterprisePlan(organizationId) + if (!isEnterprise) { + return NextResponse.json({ + permissionGroupId: null, + groupName: null, + config: null, + }) + } + + const [groupMembership] = await db + .select({ + permissionGroupId: permissionGroupMember.permissionGroupId, + config: permissionGroup.config, + groupName: permissionGroup.name, + }) + .from(permissionGroupMember) + .innerJoin(permissionGroup, eq(permissionGroupMember.permissionGroupId, permissionGroup.id)) + .where( + and( + eq(permissionGroupMember.userId, session.user.id), + eq(permissionGroup.organizationId, organizationId) + ) + ) + .limit(1) + + if (!groupMembership) { + return NextResponse.json({ + permissionGroupId: null, + groupName: null, + config: null, + }) + } + + return NextResponse.json({ + permissionGroupId: groupMembership.permissionGroupId, + groupName: groupMembership.groupName, + config: parsePermissionGroupConfig(groupMembership.config), + }) +} diff --git a/apps/sim/app/api/v1/admin/access-control/route.ts b/apps/sim/app/api/v1/admin/access-control/route.ts new file mode 100644 index 0000000000..7da37edc8e --- /dev/null +++ b/apps/sim/app/api/v1/admin/access-control/route.ts @@ -0,0 +1,169 @@ +/** + * Admin Access Control (Permission Groups) API + * + * GET /api/v1/admin/access-control + * List all permission groups with optional filtering. + * + * Query Parameters: + * - organizationId?: string - Filter by organization ID + * + * Response: { data: AdminPermissionGroup[], pagination: PaginationMeta } + * + * DELETE /api/v1/admin/access-control + * Delete permission groups for an organization. + * Used when an enterprise plan churns to clean up access control data. 
+ * + * Query Parameters: + * - organizationId: string - Delete all permission groups for this organization + * + * Response: { success: true, deletedCount: number, membersRemoved: number } + */ + +import { db } from '@sim/db' +import { organization, permissionGroup, permissionGroupMember, user } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { count, eq, inArray, sql } from 'drizzle-orm' +import { withAdminAuth } from '@/app/api/v1/admin/middleware' +import { + badRequestResponse, + internalErrorResponse, + singleResponse, +} from '@/app/api/v1/admin/responses' + +const logger = createLogger('AdminAccessControlAPI') + +export interface AdminPermissionGroup { + id: string + organizationId: string + organizationName: string | null + name: string + description: string | null + memberCount: number + createdAt: string + createdByUserId: string + createdByEmail: string | null +} + +export const GET = withAdminAuth(async (request) => { + const url = new URL(request.url) + const organizationId = url.searchParams.get('organizationId') + + try { + const baseQuery = db + .select({ + id: permissionGroup.id, + organizationId: permissionGroup.organizationId, + organizationName: organization.name, + name: permissionGroup.name, + description: permissionGroup.description, + createdAt: permissionGroup.createdAt, + createdByUserId: permissionGroup.createdBy, + createdByEmail: user.email, + }) + .from(permissionGroup) + .leftJoin(organization, eq(permissionGroup.organizationId, organization.id)) + .leftJoin(user, eq(permissionGroup.createdBy, user.id)) + + let groups + if (organizationId) { + groups = await baseQuery.where(eq(permissionGroup.organizationId, organizationId)) + } else { + groups = await baseQuery + } + + const groupsWithCounts = await Promise.all( + groups.map(async (group) => { + const [memberCount] = await db + .select({ count: count() }) + .from(permissionGroupMember) + .where(eq(permissionGroupMember.permissionGroupId, group.id)) + + 
return { + id: group.id, + organizationId: group.organizationId, + organizationName: group.organizationName, + name: group.name, + description: group.description, + memberCount: memberCount?.count ?? 0, + createdAt: group.createdAt.toISOString(), + createdByUserId: group.createdByUserId, + createdByEmail: group.createdByEmail, + } as AdminPermissionGroup + }) + ) + + logger.info('Admin API: Listed permission groups', { + organizationId, + count: groupsWithCounts.length, + }) + + return singleResponse({ + data: groupsWithCounts, + pagination: { + total: groupsWithCounts.length, + limit: groupsWithCounts.length, + offset: 0, + hasMore: false, + }, + }) + } catch (error) { + logger.error('Admin API: Failed to list permission groups', { error, organizationId }) + return internalErrorResponse('Failed to list permission groups') + } +}) + +export const DELETE = withAdminAuth(async (request) => { + const url = new URL(request.url) + const organizationId = url.searchParams.get('organizationId') + const reason = url.searchParams.get('reason') || 'Enterprise plan churn cleanup' + + if (!organizationId) { + return badRequestResponse('organizationId is required') + } + + try { + const existingGroups = await db + .select({ id: permissionGroup.id }) + .from(permissionGroup) + .where(eq(permissionGroup.organizationId, organizationId)) + + if (existingGroups.length === 0) { + logger.info('Admin API: No permission groups to delete', { organizationId }) + return singleResponse({ + success: true, + deletedCount: 0, + membersRemoved: 0, + message: 'No permission groups found for the given organization', + }) + } + + const groupIds = existingGroups.map((g) => g.id) + + const [memberCountResult] = await db + .select({ count: sql`count(*)` }) + .from(permissionGroupMember) + .where(inArray(permissionGroupMember.permissionGroupId, groupIds)) + + const membersToRemove = Number(memberCountResult?.count ?? 
0) + + // Members are deleted via cascade when permission groups are deleted + await db.delete(permissionGroup).where(eq(permissionGroup.organizationId, organizationId)) + + logger.info('Admin API: Deleted permission groups', { + organizationId, + deletedCount: existingGroups.length, + membersRemoved: membersToRemove, + reason, + }) + + return singleResponse({ + success: true, + deletedCount: existingGroups.length, + membersRemoved: membersToRemove, + reason, + }) + } catch (error) { + logger.error('Admin API: Failed to delete permission groups', { error, organizationId }) + return internalErrorResponse('Failed to delete permission groups') + } +}) diff --git a/apps/sim/app/api/v1/admin/index.ts b/apps/sim/app/api/v1/admin/index.ts index f41409bf90..ad91e0c447 100644 --- a/apps/sim/app/api/v1/admin/index.ts +++ b/apps/sim/app/api/v1/admin/index.ts @@ -36,6 +36,7 @@ * * Organizations: * GET /api/v1/admin/organizations - List all organizations + * POST /api/v1/admin/organizations - Create organization (requires ownerId) * GET /api/v1/admin/organizations/:id - Get organization details * PATCH /api/v1/admin/organizations/:id - Update organization * GET /api/v1/admin/organizations/:id/members - List organization members @@ -55,6 +56,10 @@ * BYOK Keys: * GET /api/v1/admin/byok - List BYOK keys (?organizationId=X or ?workspaceId=X) * DELETE /api/v1/admin/byok - Delete BYOK keys for org/workspace + * + * Access Control (Permission Groups): + * GET /api/v1/admin/access-control - List permission groups (?organizationId=X) + * DELETE /api/v1/admin/access-control - Delete permission groups for org (?organizationId=X) */ export type { AdminAuthFailure, AdminAuthResult, AdminAuthSuccess } from '@/app/api/v1/admin/auth' diff --git a/apps/sim/app/api/v1/admin/organizations/[id]/billing/route.ts b/apps/sim/app/api/v1/admin/organizations/[id]/billing/route.ts index 952b437144..b563699830 100644 --- a/apps/sim/app/api/v1/admin/organizations/[id]/billing/route.ts +++ 
b/apps/sim/app/api/v1/admin/organizations/[id]/billing/route.ts @@ -16,10 +16,11 @@ */ import { db } from '@sim/db' -import { organization } from '@sim/db/schema' +import { member, organization } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { eq } from 'drizzle-orm' +import { count, eq } from 'drizzle-orm' import { getOrganizationBillingData } from '@/lib/billing/core/organization' +import { isBillingEnabled } from '@/lib/core/config/feature-flags' import { withAdminAuthParams } from '@/app/api/v1/admin/middleware' import { badRequestResponse, @@ -39,6 +40,42 @@ export const GET = withAdminAuthParams(async (_, context) => { const { id: organizationId } = await context.params try { + if (!isBillingEnabled) { + const [[orgData], [memberCount]] = await Promise.all([ + db.select().from(organization).where(eq(organization.id, organizationId)).limit(1), + db.select({ count: count() }).from(member).where(eq(member.organizationId, organizationId)), + ]) + + if (!orgData) { + return notFoundResponse('Organization') + } + + const data: AdminOrganizationBillingSummary = { + organizationId: orgData.id, + organizationName: orgData.name, + subscriptionPlan: 'none', + subscriptionStatus: 'none', + totalSeats: Number.MAX_SAFE_INTEGER, + usedSeats: memberCount?.count || 0, + availableSeats: Number.MAX_SAFE_INTEGER, + totalCurrentUsage: 0, + totalUsageLimit: Number.MAX_SAFE_INTEGER, + minimumBillingAmount: 0, + averageUsagePerMember: 0, + usagePercentage: 0, + billingPeriodStart: null, + billingPeriodEnd: null, + membersOverLimit: 0, + membersNearLimit: 0, + } + + logger.info( + `Admin API: Retrieved billing summary for organization ${organizationId} (billing disabled)` + ) + + return singleResponse(data) + } + const billingData = await getOrganizationBillingData(organizationId) if (!billingData) { diff --git a/apps/sim/app/api/v1/admin/organizations/[id]/members/[memberId]/route.ts 
b/apps/sim/app/api/v1/admin/organizations/[id]/members/[memberId]/route.ts index 2496c363c6..d3691a6720 100644 --- a/apps/sim/app/api/v1/admin/organizations/[id]/members/[memberId]/route.ts +++ b/apps/sim/app/api/v1/admin/organizations/[id]/members/[memberId]/route.ts @@ -30,6 +30,7 @@ import { member, organization, user, userStats } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { and, eq } from 'drizzle-orm' import { removeUserFromOrganization } from '@/lib/billing/organizations/membership' +import { isBillingEnabled } from '@/lib/core/config/feature-flags' import { withAdminAuthParams } from '@/app/api/v1/admin/middleware' import { badRequestResponse, @@ -182,7 +183,7 @@ export const PATCH = withAdminAuthParams(async (request, context) = export const DELETE = withAdminAuthParams(async (request, context) => { const { id: organizationId, memberId } = await context.params const url = new URL(request.url) - const skipBillingLogic = url.searchParams.get('skipBillingLogic') === 'true' + const skipBillingLogic = !isBillingEnabled || url.searchParams.get('skipBillingLogic') === 'true' try { const [orgData] = await db diff --git a/apps/sim/app/api/v1/admin/organizations/[id]/members/route.ts b/apps/sim/app/api/v1/admin/organizations/[id]/members/route.ts index 797831b887..cc9cee6320 100644 --- a/apps/sim/app/api/v1/admin/organizations/[id]/members/route.ts +++ b/apps/sim/app/api/v1/admin/organizations/[id]/members/route.ts @@ -34,6 +34,7 @@ import { createLogger } from '@sim/logger' import { count, eq } from 'drizzle-orm' import { addUserToOrganization } from '@/lib/billing/organizations/membership' import { requireStripeClient } from '@/lib/billing/stripe-client' +import { isBillingEnabled } from '@/lib/core/config/feature-flags' import { withAdminAuthParams } from '@/app/api/v1/admin/middleware' import { badRequestResponse, @@ -221,14 +222,14 @@ export const POST = withAdminAuthParams(async (request, context) => userId: body.userId, 
organizationId, role: body.role, + skipBillingLogic: !isBillingEnabled, }) if (!result.success) { return badRequestResponse(result.error || 'Failed to add member') } - // Sync Pro subscription cancellation with Stripe (same as invitation flow) - if (result.billingActions.proSubscriptionToCancel?.stripeSubscriptionId) { + if (isBillingEnabled && result.billingActions.proSubscriptionToCancel?.stripeSubscriptionId) { try { const stripe = requireStripeClient() await stripe.subscriptions.update( diff --git a/apps/sim/app/api/v1/admin/organizations/route.ts b/apps/sim/app/api/v1/admin/organizations/route.ts index f19f822467..5cac5aba07 100644 --- a/apps/sim/app/api/v1/admin/organizations/route.ts +++ b/apps/sim/app/api/v1/admin/organizations/route.ts @@ -8,14 +8,32 @@ * - offset: number (default: 0) * * Response: AdminListResponse + * + * POST /api/v1/admin/organizations + * + * Create a new organization. + * + * Body: + * - name: string - Organization name (required) + * - slug: string - Organization slug (optional, auto-generated from name if not provided) + * - ownerId: string - User ID of the organization owner (required) + * + * Response: AdminSingleResponse */ +import { randomUUID } from 'crypto' import { db } from '@sim/db' -import { organization } from '@sim/db/schema' +import { member, organization, user } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { count } from 'drizzle-orm' +import { count, eq } from 'drizzle-orm' import { withAdminAuth } from '@/app/api/v1/admin/middleware' -import { internalErrorResponse, listResponse } from '@/app/api/v1/admin/responses' +import { + badRequestResponse, + internalErrorResponse, + listResponse, + notFoundResponse, + singleResponse, +} from '@/app/api/v1/admin/responses' import { type AdminOrganization, createPaginationMeta, @@ -47,3 +65,90 @@ export const GET = withAdminAuth(async (request) => { return internalErrorResponse('Failed to list organizations') } }) + +export const POST = 
withAdminAuth(async (request) => { + try { + const body = await request.json() + + if (!body.name || typeof body.name !== 'string' || body.name.trim().length === 0) { + return badRequestResponse('name is required') + } + + if (!body.ownerId || typeof body.ownerId !== 'string') { + return badRequestResponse('ownerId is required') + } + + const [ownerData] = await db + .select({ id: user.id, name: user.name }) + .from(user) + .where(eq(user.id, body.ownerId)) + .limit(1) + + if (!ownerData) { + return notFoundResponse('Owner user') + } + + const [existingMembership] = await db + .select({ organizationId: member.organizationId }) + .from(member) + .where(eq(member.userId, body.ownerId)) + .limit(1) + + if (existingMembership) { + return badRequestResponse( + 'User is already a member of another organization. Users can only belong to one organization at a time.' + ) + } + + const name = body.name.trim() + const slug = + body.slug?.trim() || + name + .toLowerCase() + .replace(/[^a-z0-9]+/g, '-') + .replace(/^-|-$/g, '') + + const organizationId = randomUUID() + const memberId = randomUUID() + const now = new Date() + + await db.transaction(async (tx) => { + await tx.insert(organization).values({ + id: organizationId, + name, + slug, + createdAt: now, + updatedAt: now, + }) + + await tx.insert(member).values({ + id: memberId, + userId: body.ownerId, + organizationId, + role: 'owner', + createdAt: now, + }) + }) + + const [createdOrg] = await db + .select() + .from(organization) + .where(eq(organization.id, organizationId)) + .limit(1) + + logger.info(`Admin API: Created organization ${organizationId}`, { + name, + slug, + ownerId: body.ownerId, + memberId, + }) + + return singleResponse({ + ...toAdminOrganization(createdOrg), + memberId, + }) + } catch (error) { + logger.error('Admin API: Failed to create organization', { error }) + return internalErrorResponse('Failed to create organization') + } +}) diff --git a/apps/sim/app/workspace/[workspaceId]/knowledge/page.tsx 
b/apps/sim/app/workspace/[workspaceId]/knowledge/page.tsx index d458d683c7..a5c1eadeb4 100644 --- a/apps/sim/app/workspace/[workspaceId]/knowledge/page.tsx +++ b/apps/sim/app/workspace/[workspaceId]/knowledge/page.tsx @@ -1 +1,33 @@ -export { Knowledge as default } from './knowledge' +import { redirect } from 'next/navigation' +import { getSession } from '@/lib/auth' +import { verifyWorkspaceMembership } from '@/app/api/workflows/utils' +import { getUserPermissionConfig } from '@/executor/utils/permission-check' +import { Knowledge } from './knowledge' + +interface KnowledgePageProps { + params: Promise<{ + workspaceId: string + }> +} + +export default async function KnowledgePage({ params }: KnowledgePageProps) { + const { workspaceId } = await params + const session = await getSession() + + if (!session?.user?.id) { + redirect('/') + } + + const hasPermission = await verifyWorkspaceMembership(session.user.id, workspaceId) + if (!hasPermission) { + redirect('/') + } + + // Check permission group restrictions + const permissionConfig = await getUserPermissionConfig(session.user.id) + if (permissionConfig?.hideKnowledgeBaseTab) { + redirect(`/workspace/${workspaceId}`) + } + + return <Knowledge /> +} diff --git a/apps/sim/app/workspace/[workspaceId]/logs/components/log-details/log-details.tsx b/apps/sim/app/workspace/[workspaceId]/logs/components/log-details/log-details.tsx index 000e1be1ad..6bffda7c72 100644 --- a/apps/sim/app/workspace/[workspaceId]/logs/components/log-details/log-details.tsx +++ b/apps/sim/app/workspace/[workspaceId]/logs/components/log-details/log-details.tsx @@ -17,6 +17,7 @@ import { StatusBadge, TriggerBadge, } from '@/app/workspace/[workspaceId]/logs/utils' +import { usePermissionConfig } from '@/hooks/use-permission-config' import { formatCost } from '@/providers/utils' import type { WorkflowLog } from '@/stores/logs/filters/types' import { useLogDetailsUIStore } from '@/stores/logs/store' @@ -57,6 +58,7 @@ export const LogDetails = memo(function
LogDetails({ const scrollAreaRef = useRef(null) const panelWidth = useLogDetailsUIStore((state) => state.panelWidth) const { handleMouseDown } = useLogDetailsResize() + const { config: permissionConfig } = usePermissionConfig() useEffect(() => { if (scrollAreaRef.current) { @@ -264,7 +266,7 @@ export const LogDetails = memo(function LogDetails({
    {/* Workflow State */} - {isWorkflowExecutionLog && log.executionId && ( + {isWorkflowExecutionLog && log.executionId && !permissionConfig.hideTraceSpans && (
    Workflow State @@ -282,12 +284,14 @@ export const LogDetails = memo(function LogDetails({ )} {/* Workflow Execution - Trace Spans */} - {isWorkflowExecutionLog && log.executionData?.traceSpans && ( - - )} + {isWorkflowExecutionLog && + log.executionData?.traceSpans && + !permissionConfig.hideTraceSpans && ( + + )} {/* Files */} {log.files && log.files.length > 0 && ( diff --git a/apps/sim/app/workspace/[workspaceId]/templates/page.tsx b/apps/sim/app/workspace/[workspaceId]/templates/page.tsx index 0e49d77b5e..9955c24331 100644 --- a/apps/sim/app/workspace/[workspaceId]/templates/page.tsx +++ b/apps/sim/app/workspace/[workspaceId]/templates/page.tsx @@ -6,6 +6,7 @@ import { getSession } from '@/lib/auth' import { verifyWorkspaceMembership } from '@/app/api/workflows/utils' import type { Template as WorkspaceTemplate } from '@/app/workspace/[workspaceId]/templates/templates' import Templates from '@/app/workspace/[workspaceId]/templates/templates' +import { getUserPermissionConfig } from '@/executor/utils/permission-check' interface TemplatesPageProps { params: Promise<{ @@ -32,6 +33,12 @@ export default async function TemplatesPage({ params }: TemplatesPageProps) { redirect('/') } + // Check permission group restrictions + const permissionConfig = await getUserPermissionConfig(session.user.id) + if (permissionConfig?.hideTemplates) { + redirect(`/workspace/${workspaceId}`) + } + // Determine effective super user (DB flag AND UI mode enabled) const currentUser = await db .select({ isSuperUser: user.isSuperUser }) diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/hooks/use-mention-data.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/hooks/use-mention-data.ts index 79695526fb..476623e8b7 100644 --- 
a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/hooks/use-mention-data.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/hooks/use-mention-data.ts @@ -3,6 +3,7 @@ import { useCallback, useEffect, useState } from 'react' import { createLogger } from '@sim/logger' import { useShallow } from 'zustand/react/shallow' +import { usePermissionConfig } from '@/hooks/use-permission-config' import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import { useWorkflowStore } from '@/stores/workflows/workflow/store' @@ -92,6 +93,8 @@ interface UseMentionDataProps { export function useMentionData(props: UseMentionDataProps) { const { workflowId, workspaceId } = props + const { config, isBlockAllowed } = usePermissionConfig() + const [pastChats, setPastChats] = useState([]) const [isLoadingPastChats, setIsLoadingPastChats] = useState(false) @@ -101,6 +104,11 @@ export function useMentionData(props: UseMentionDataProps) { const [blocksList, setBlocksList] = useState([]) const [isLoadingBlocks, setIsLoadingBlocks] = useState(false) + // Reset blocks list when permission config changes + useEffect(() => { + setBlocksList([]) + }, [config.allowedIntegrations]) + const [templatesList, setTemplatesList] = useState([]) const [isLoadingTemplates, setIsLoadingTemplates] = useState(false) @@ -252,7 +260,13 @@ export function useMentionData(props: UseMentionDataProps) { const { getAllBlocks } = await import('@/blocks') const all = getAllBlocks() const regularBlocks = all - .filter((b: any) => b.type !== 'starter' && !b.hideFromToolbar && b.category === 'blocks') + .filter( + (b: any) => + b.type !== 'starter' && + !b.hideFromToolbar && + b.category === 'blocks' && + isBlockAllowed(b.type) + ) .map((b: any) => ({ id: b.type, name: b.name || b.type, @@ -262,7 +276,13 @@ export function useMentionData(props: UseMentionDataProps) { .sort((a: any, b: 
any) => a.name.localeCompare(b.name)) const toolBlocks = all - .filter((b: any) => b.type !== 'starter' && !b.hideFromToolbar && b.category === 'tools') + .filter( + (b: any) => + b.type !== 'starter' && + !b.hideFromToolbar && + b.category === 'tools' && + isBlockAllowed(b.type) + ) .map((b: any) => ({ id: b.type, name: b.name || b.type, @@ -276,7 +296,7 @@ export function useMentionData(props: UseMentionDataProps) { } finally { setIsLoadingBlocks(false) } - }, [isLoadingBlocks, blocksList.length]) + }, [isLoadingBlocks, blocksList.length, isBlockAllowed]) /** * Ensures templates are loaded diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/combobox/combobox.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/combobox/combobox.tsx index c5b8f67e2e..0565fb998c 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/combobox/combobox.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/combobox/combobox.tsx @@ -8,6 +8,8 @@ import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/c import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-accessible-reference-prefixes' import type { SubBlockConfig } from '@/blocks/types' import { getDependsOnFields } from '@/blocks/utils' +import { usePermissionConfig } from '@/hooks/use-permission-config' +import { getProviderFromModel } from '@/providers/utils' import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import { useSubBlockStore } from '@/stores/workflows/subblock/store' @@ -132,10 +134,27 @@ export function ComboBox({ // Determine the active value based on mode (preview vs. controlled vs. store) const value = isPreview ? 
previewValue : propValue !== undefined ? propValue : storeValue + // Permission-based filtering for model dropdowns + const { isProviderAllowed, isLoading: isPermissionLoading } = usePermissionConfig() + // Evaluate static options if provided as a function const staticOptions = useMemo(() => { - return typeof options === 'function' ? options() : options - }, [options]) + const opts = typeof options === 'function' ? options() : options + + if (subBlockId === 'model') { + return opts.filter((opt) => { + const modelId = typeof opt === 'string' ? opt : opt.id + try { + const providerId = getProviderFromModel(modelId) + return isProviderAllowed(providerId) + } catch { + return true + } + }) + } + + return opts + }, [options, subBlockId, isProviderAllowed]) // Normalize fetched options to match ComboBoxOption format const normalizedFetchedOptions = useMemo((): ComboBoxOption[] => { @@ -147,6 +166,18 @@ export function ComboBox({ let opts: ComboBoxOption[] = fetchOptions && normalizedFetchedOptions.length > 0 ? normalizedFetchedOptions : staticOptions + if (subBlockId === 'model' && fetchOptions && normalizedFetchedOptions.length > 0) { + opts = opts.filter((opt) => { + const modelId = typeof opt === 'string' ? 
opt : opt.id + try { + const providerId = getProviderFromModel(modelId) + return isProviderAllowed(providerId) + } catch { + return true + } + }) + } + // Merge hydrated option if not already present if (hydratedOption) { const alreadyPresent = opts.some((o) => @@ -158,7 +189,14 @@ export function ComboBox({ } return opts - }, [fetchOptions, normalizedFetchedOptions, staticOptions, hydratedOption]) + }, [ + fetchOptions, + normalizedFetchedOptions, + staticOptions, + hydratedOption, + subBlockId, + isProviderAllowed, + ]) // Convert options to Combobox format const comboboxOptions = useMemo((): ComboboxOption[] => { @@ -231,16 +269,34 @@ export function ComboBox({ setStoreInitialized(true) }, []) - // Set default value once store is initialized and value is undefined + // Check if current value is valid (exists in allowed options) + const isValueValid = useMemo(() => { + if (value === null || value === undefined) return false + return evaluatedOptions.some((opt) => getOptionValue(opt) === value) + }, [value, evaluatedOptions, getOptionValue]) + + // Set default value once store is initialized and permissions are loaded + // Also reset if current value becomes invalid (e.g., provider was blocked) useEffect(() => { - if ( - storeInitialized && - (value === null || value === undefined) && - defaultOptionValue !== undefined - ) { + if (isPermissionLoading) return + if (!storeInitialized) return + if (defaultOptionValue === undefined) return + + const needsDefault = value === null || value === undefined + const needsReset = subBlockId === 'model' && value && !isValueValid + + if (needsDefault || needsReset) { setStoreValue(defaultOptionValue) } - }, [storeInitialized, value, defaultOptionValue, setStoreValue]) + }, [ + storeInitialized, + value, + defaultOptionValue, + setStoreValue, + isPermissionLoading, + subBlockId, + isValueValid, + ]) // Clear fetched options and hydrated option when dependencies change useEffect(() => { diff --git 
a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/tool-input.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/tool-input.tsx index bc14069563..21c4b2aba2 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/tool-input.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/tool-input.tsx @@ -7,6 +7,7 @@ import { useParams } from 'next/navigation' import { Badge, Combobox, + type ComboboxOption, type ComboboxOptionGroup, Popover, PopoverContent, @@ -59,6 +60,7 @@ import { import { useForceRefreshMcpTools, useMcpServers, useStoredMcpTools } from '@/hooks/queries/mcp' import { useWorkflows } from '@/hooks/queries/workflows' import { useMcpTools } from '@/hooks/use-mcp-tools' +import { usePermissionConfig } from '@/hooks/use-permission-config' import { getProviderFromModel, supportsToolUsageControl } from '@/providers/utils' import { useSettingsModalStore } from '@/stores/settings-modal/store' import { useSubBlockStore } from '@/stores/workflows/subblock/store' @@ -1009,18 +1011,23 @@ export function ToolInput({ const provider = model ? getProviderFromModel(model) : '' const supportsToolControl = provider ? 
supportsToolUsageControl(provider) : false - const toolBlocks = getAllBlocks().filter( - (block) => - (block.category === 'tools' || - block.type === 'api' || - block.type === 'webhook_request' || - block.type === 'workflow' || - block.type === 'knowledge' || - block.type === 'function') && - block.type !== 'evaluator' && - block.type !== 'mcp' && - block.type !== 'file' - ) + const { filterBlocks, config: permissionConfig } = usePermissionConfig() + + const toolBlocks = useMemo(() => { + const allToolBlocks = getAllBlocks().filter( + (block) => + (block.category === 'tools' || + block.type === 'api' || + block.type === 'webhook_request' || + block.type === 'workflow' || + block.type === 'knowledge' || + block.type === 'function') && + block.type !== 'evaluator' && + block.type !== 'mcp' && + block.type !== 'file' + ) + return filterBlocks(allToolBlocks) + }, [filterBlocks]) const customFilter = useCallback((value: string, search: string) => { if (!search.trim()) return 1 @@ -1608,33 +1615,37 @@ export function ToolInput({ const groups: ComboboxOptionGroup[] = [] // Actions group (no section header) - groups.push({ - items: [ - { - label: 'Create Tool', - value: 'action-create-tool', - icon: WrenchIcon, - onSelect: () => { - setCustomToolModalOpen(true) - setOpen(false) - }, - disabled: isPreview, + const actionItems: ComboboxOption[] = [] + if (!permissionConfig.disableCustomTools) { + actionItems.push({ + label: 'Create Tool', + value: 'action-create-tool', + icon: WrenchIcon, + onSelect: () => { + setCustomToolModalOpen(true) + setOpen(false) }, - { - label: 'Add MCP Server', - value: 'action-add-mcp', - icon: McpIcon, - onSelect: () => { - setOpen(false) - window.dispatchEvent(new CustomEvent('open-settings', { detail: { tab: 'mcp' } })) - }, - disabled: isPreview, + disabled: isPreview, + }) + } + if (!permissionConfig.disableMcpTools) { + actionItems.push({ + label: 'Add MCP Server', + value: 'action-add-mcp', + icon: McpIcon, + onSelect: () => { + 
setOpen(false) + window.dispatchEvent(new CustomEvent('open-settings', { detail: { tab: 'mcp' } })) }, - ], - }) + disabled: isPreview, + }) + } + if (actionItems.length > 0) { + groups.push({ items: actionItems }) + } // Custom Tools section - if (customTools.length > 0) { + if (!permissionConfig.disableCustomTools && customTools.length > 0) { groups.push({ section: 'Custom Tools', items: customTools.map((customTool) => ({ @@ -1659,7 +1670,7 @@ export function ToolInput({ } // MCP Tools section - if (availableMcpTools.length > 0) { + if (!permissionConfig.disableMcpTools && availableMcpTools.length > 0) { groups.push({ section: 'MCP Tools', items: availableMcpTools.map((mcpTool) => { @@ -1736,6 +1747,8 @@ export function ToolInput({ setStoreValue, handleMcpToolSelect, handleSelectTool, + permissionConfig.disableCustomTools, + permissionConfig.disableMcpTools, ]) const toolRequiresOAuth = (toolId: string): boolean => { diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/toolbar/toolbar.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/toolbar/toolbar.tsx index 76567908c5..86fa30f4e6 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/toolbar/toolbar.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/toolbar/toolbar.tsx @@ -26,6 +26,7 @@ import { import { LoopTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/loop/loop-config' import { ParallelTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/parallel/parallel-config' import type { BlockConfig } from '@/blocks/types' +import { usePermissionConfig } from '@/hooks/use-permission-config' import { useToolbarStore } from '@/stores/panel/toolbar/store' interface BlockItem { @@ -206,9 +207,16 @@ export const Toolbar = forwardRef(function Toolbar( triggersHeaderRef, }) + // Permission config for filtering + const { 
filterBlocks } = usePermissionConfig() + // Get static data (computed once and cached) - const triggers = getTriggers() - const blocks = getBlocks() + const allTriggers = getTriggers() + const allBlocks = getBlocks() + + // Apply permission-based filtering to blocks and triggers + const blocks = useMemo(() => filterBlocks(allBlocks), [filterBlocks, allBlocks]) + const triggers = useMemo(() => filterBlocks(allTriggers), [filterBlocks, allTriggers]) // Determine if triggers are at minimum height (blocks are fully expanded) const isTriggersAtMinimum = toolbarTriggersHeight <= TRIGGERS_MIN_THRESHOLD diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/panel.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/panel.tsx index 7dda151af0..c6fb11539d 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/panel.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/panel.tsx @@ -40,6 +40,7 @@ import { Variables } from '@/app/workspace/[workspaceId]/w/[workflowId]/componen import { useAutoLayout } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-auto-layout' import { useWorkflowExecution } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution' import { useDeleteWorkflow, useImportWorkflow } from '@/app/workspace/[workspaceId]/w/hooks' +import { usePermissionConfig } from '@/hooks/use-permission-config' import { useChatStore } from '@/stores/chat/store' import { usePanelStore } from '@/stores/panel/store' import type { PanelTab } from '@/stores/panel/types' @@ -92,6 +93,7 @@ export function Panel() { // Hooks const userPermissions = useUserPermissionsContext() + const { config: permissionConfig } = usePermissionConfig() const { isImporting, handleFileChange } = useImportWorkflow({ workspaceId }) const { workflows, activeWorkflowId, duplicateWorkflow, hydration } = useWorkflowRegistry() const isRegistryLoading = @@ -438,18 +440,20 @@ 
export function Panel() { {/* Tabs */}
    - + {!permissionConfig.hideCopilot && ( + + )} +
    + +
    + {filteredMembers.length === 0 ? ( +

    + No members found matching "{searchTerm}" +

    + ) : ( +
    + {filteredMembers.map((member: any) => { + const name = member.user?.name || 'Unknown' + const email = member.user?.email || '' + const avatarInitial = name.charAt(0).toUpperCase() + const isSelected = selectedMemberIds.has(member.userId) + + return ( + + ) + })} +
    + )} +
    +
    + )} + + + + + + + + ) +} + +function AccessControlSkeleton() { + return ( +
    +
    + +
    +
    + +
    + + +
    +
    + +
    +
    +
    + ) +} + +export function AccessControl() { + const { data: session } = useSession() + const { data: organizationsData, isPending: orgsLoading } = useOrganizations() + const { data: subscriptionData, isPending: subLoading } = useSubscriptionData() + + const activeOrganization = organizationsData?.activeOrganization + const subscriptionStatus = getSubscriptionStatus(subscriptionData?.data) + const hasEnterprisePlan = subscriptionStatus.isEnterprise + const userRole = getUserRole(activeOrganization, session?.user?.email) + const isOwner = userRole === 'owner' + const isAdmin = userRole === 'admin' + const isOrgAdminOrOwner = isOwner || isAdmin + const canManage = hasEnterprisePlan && isOrgAdminOrOwner && !!activeOrganization?.id + + const queryEnabled = !!activeOrganization?.id + const { data: permissionGroups = [], isPending: groupsLoading } = usePermissionGroups( + activeOrganization?.id, + queryEnabled + ) + + // Show loading while dependencies load, or while permission groups query is pending + const isLoading = orgsLoading || subLoading || (queryEnabled && groupsLoading) + const { data: organization } = useOrganization(activeOrganization?.id || '') + + const createPermissionGroup = useCreatePermissionGroup() + const updatePermissionGroup = useUpdatePermissionGroup() + const deletePermissionGroup = useDeletePermissionGroup() + const bulkAddMembers = useBulkAddPermissionGroupMembers() + + const [searchTerm, setSearchTerm] = useState('') + const [showCreateModal, setShowCreateModal] = useState(false) + const [viewingGroup, setViewingGroup] = useState(null) + const [newGroupName, setNewGroupName] = useState('') + const [newGroupDescription, setNewGroupDescription] = useState('') + const [createError, setCreateError] = useState(null) + const [deletingGroup, setDeletingGroup] = useState<{ id: string; name: string } | null>(null) + const [deletingGroupIds, setDeletingGroupIds] = useState>(new Set()) + + const { data: members = [], isPending: membersLoading } = 
usePermissionGroupMembers( + viewingGroup?.id + ) + const removeMember = useRemovePermissionGroupMember() + + const [showConfigModal, setShowConfigModal] = useState(false) + const [editingConfig, setEditingConfig] = useState(null) + const [showAddMembersModal, setShowAddMembersModal] = useState(false) + const [selectedMemberIds, setSelectedMemberIds] = useState>(new Set()) + const [providerSearchTerm, setProviderSearchTerm] = useState('') + const [integrationSearchTerm, setIntegrationSearchTerm] = useState('') + const [platformSearchTerm, setPlatformSearchTerm] = useState('') + const [showUnsavedChanges, setShowUnsavedChanges] = useState(false) + + const platformFeatures = useMemo( + () => [ + { + id: 'hide-knowledge-base', + label: 'Knowledge Base', + category: 'Sidebar', + configKey: 'hideKnowledgeBaseTab' as const, + }, + { + id: 'hide-templates', + label: 'Templates', + category: 'Sidebar', + configKey: 'hideTemplates' as const, + }, + { + id: 'hide-copilot', + label: 'Copilot', + category: 'Workflow Panel', + configKey: 'hideCopilot' as const, + }, + { + id: 'hide-api-keys', + label: 'API Keys', + category: 'Settings Tabs', + configKey: 'hideApiKeysTab' as const, + }, + { + id: 'hide-environment', + label: 'Environment', + category: 'Settings Tabs', + configKey: 'hideEnvironmentTab' as const, + }, + { + id: 'hide-files', + label: 'Files', + category: 'Settings Tabs', + configKey: 'hideFilesTab' as const, + }, + { + id: 'disable-mcp', + label: 'MCP Tools', + category: 'Tools', + configKey: 'disableMcpTools' as const, + }, + { + id: 'disable-custom-tools', + label: 'Custom Tools', + category: 'Tools', + configKey: 'disableCustomTools' as const, + }, + { + id: 'hide-trace-spans', + label: 'Trace Spans', + category: 'Logs', + configKey: 'hideTraceSpans' as const, + }, + ], + [] + ) + + const filteredPlatformFeatures = useMemo(() => { + if (!platformSearchTerm.trim()) return platformFeatures + const search = platformSearchTerm.toLowerCase() + return 
platformFeatures.filter( + (f) => f.label.toLowerCase().includes(search) || f.category.toLowerCase().includes(search) + ) + }, [platformFeatures, platformSearchTerm]) + + const platformCategories = useMemo(() => { + const categories: Record = {} + for (const feature of filteredPlatformFeatures) { + if (!categories[feature.category]) { + categories[feature.category] = [] + } + categories[feature.category].push(feature) + } + return categories + }, [filteredPlatformFeatures]) + + const hasConfigChanges = useMemo(() => { + if (!viewingGroup || !editingConfig) return false + const original = viewingGroup.config + return JSON.stringify(original) !== JSON.stringify(editingConfig) + }, [viewingGroup, editingConfig]) + + const allBlocks = useMemo(() => { + // Filter out hidden blocks and start_trigger (which should never be disabled) + const blocks = getAllBlocks().filter((b) => !b.hideFromToolbar && b.type !== 'start_trigger') + return blocks.sort((a, b) => { + // Group by category: triggers first, then blocks, then tools + const categoryOrder = { triggers: 0, blocks: 1, tools: 2 } + const catA = categoryOrder[a.category] ?? 3 + const catB = categoryOrder[b.category] ?? 
3 + if (catA !== catB) return catA - catB + return a.name.localeCompare(b.name) + }) + }, []) + const allProviderIds = useMemo(() => getAllProviderIds(), []) + + const filteredProviders = useMemo(() => { + if (!providerSearchTerm.trim()) return allProviderIds + const query = providerSearchTerm.toLowerCase() + return allProviderIds.filter((id) => id.toLowerCase().includes(query)) + }, [allProviderIds, providerSearchTerm]) + + const filteredBlocks = useMemo(() => { + if (!integrationSearchTerm.trim()) return allBlocks + const query = integrationSearchTerm.toLowerCase() + return allBlocks.filter((b) => b.name.toLowerCase().includes(query)) + }, [allBlocks, integrationSearchTerm]) + + const orgMembers = useMemo(() => { + return organization?.members || [] + }, [organization]) + + const filteredGroups = useMemo(() => { + if (!searchTerm.trim()) return permissionGroups + const searchLower = searchTerm.toLowerCase() + return permissionGroups.filter((g) => g.name.toLowerCase().includes(searchLower)) + }, [permissionGroups, searchTerm]) + + const handleCreatePermissionGroup = useCallback(async () => { + if (!newGroupName.trim() || !activeOrganization?.id) return + setCreateError(null) + try { + const result = await createPermissionGroup.mutateAsync({ + organizationId: activeOrganization.id, + name: newGroupName.trim(), + description: newGroupDescription.trim() || undefined, + }) + setShowCreateModal(false) + setNewGroupName('') + setNewGroupDescription('') + } catch (error) { + logger.error('Failed to create permission group', error) + if (error instanceof Error) { + setCreateError(error.message) + } else { + setCreateError('Failed to create permission group') + } + } + }, [newGroupName, newGroupDescription, activeOrganization?.id, createPermissionGroup]) + + const handleCloseCreateModal = useCallback(() => { + setShowCreateModal(false) + setNewGroupName('') + setNewGroupDescription('') + setCreateError(null) + }, []) + + const handleBackToList = useCallback(() => { + 
setViewingGroup(null) + }, []) + + const handleDeleteClick = useCallback((group: PermissionGroup) => { + setDeletingGroup({ id: group.id, name: group.name }) + }, []) + + const confirmDelete = useCallback(async () => { + if (!deletingGroup || !activeOrganization?.id) return + setDeletingGroupIds((prev) => new Set(prev).add(deletingGroup.id)) + try { + await deletePermissionGroup.mutateAsync({ + permissionGroupId: deletingGroup.id, + organizationId: activeOrganization.id, + }) + setDeletingGroup(null) + if (viewingGroup?.id === deletingGroup.id) { + setViewingGroup(null) + } + } catch (error) { + logger.error('Failed to delete permission group', error) + } finally { + setDeletingGroupIds((prev) => { + const next = new Set(prev) + next.delete(deletingGroup.id) + return next + }) + } + }, [deletingGroup, activeOrganization?.id, deletePermissionGroup, viewingGroup?.id]) + + const handleRemoveMember = useCallback( + async (memberId: string) => { + if (!viewingGroup) return + try { + await removeMember.mutateAsync({ + permissionGroupId: viewingGroup.id, + memberId, + }) + } catch (error) { + logger.error('Failed to remove member', error) + } + }, + [viewingGroup, removeMember] + ) + + const handleOpenConfigModal = useCallback(() => { + if (!viewingGroup) return + setEditingConfig({ ...viewingGroup.config }) + setShowConfigModal(true) + }, [viewingGroup]) + + const handleSaveConfig = useCallback(async () => { + if (!viewingGroup || !editingConfig || !activeOrganization?.id) return + try { + await updatePermissionGroup.mutateAsync({ + id: viewingGroup.id, + organizationId: activeOrganization.id, + config: editingConfig, + }) + setShowConfigModal(false) + setEditingConfig(null) + setProviderSearchTerm('') + setIntegrationSearchTerm('') + setPlatformSearchTerm('') + setViewingGroup((prev) => (prev ? 
{ ...prev, config: editingConfig } : null)) + } catch (error) { + logger.error('Failed to update config', error) + } + }, [viewingGroup, editingConfig, activeOrganization?.id, updatePermissionGroup]) + + const handleOpenAddMembersModal = useCallback(() => { + const existingMemberUserIds = new Set(members.map((m) => m.userId)) + setSelectedMemberIds(new Set()) + setShowAddMembersModal(true) + }, [members]) + + const handleAddSelectedMembers = useCallback(async () => { + if (!viewingGroup || selectedMemberIds.size === 0) return + try { + await bulkAddMembers.mutateAsync({ + permissionGroupId: viewingGroup.id, + userIds: Array.from(selectedMemberIds), + }) + setShowAddMembersModal(false) + setSelectedMemberIds(new Set()) + } catch (error) { + logger.error('Failed to add members', error) + } + }, [viewingGroup, selectedMemberIds, bulkAddMembers]) + + const toggleIntegration = useCallback( + (blockType: string) => { + if (!editingConfig) return + const current = editingConfig.allowedIntegrations + if (current === null) { + const allExcept = allBlocks.map((b) => b.type).filter((t) => t !== blockType) + setEditingConfig({ ...editingConfig, allowedIntegrations: allExcept }) + } else if (current.includes(blockType)) { + const updated = current.filter((t) => t !== blockType) + setEditingConfig({ + ...editingConfig, + allowedIntegrations: updated.length === allBlocks.length ? null : updated, + }) + } else { + const updated = [...current, blockType] + setEditingConfig({ + ...editingConfig, + allowedIntegrations: updated.length === allBlocks.length ? 
null : updated, + }) + } + }, + [editingConfig, allBlocks] + ) + + const toggleProvider = useCallback( + (providerId: string) => { + if (!editingConfig) return + const current = editingConfig.allowedModelProviders + if (current === null) { + const allExcept = allProviderIds.filter((p) => p !== providerId) + setEditingConfig({ ...editingConfig, allowedModelProviders: allExcept }) + } else if (current.includes(providerId)) { + const updated = current.filter((p) => p !== providerId) + setEditingConfig({ + ...editingConfig, + allowedModelProviders: updated.length === allProviderIds.length ? null : updated, + }) + } else { + const updated = [...current, providerId] + setEditingConfig({ + ...editingConfig, + allowedModelProviders: updated.length === allProviderIds.length ? null : updated, + }) + } + }, + [editingConfig, allProviderIds] + ) + + const isIntegrationAllowed = useCallback( + (blockType: string) => { + if (!editingConfig) return true + return ( + editingConfig.allowedIntegrations === null || + editingConfig.allowedIntegrations.includes(blockType) + ) + }, + [editingConfig] + ) + + const isProviderAllowed = useCallback( + (providerId: string) => { + if (!editingConfig) return true + return ( + editingConfig.allowedModelProviders === null || + editingConfig.allowedModelProviders.includes(providerId) + ) + }, + [editingConfig] + ) + + const availableMembersToAdd = useMemo(() => { + const existingMemberUserIds = new Set(members.map((m) => m.userId)) + return orgMembers.filter((m: any) => !existingMemberUserIds.has(m.userId)) + }, [orgMembers, members]) + + if (isLoading) { + return + } + + if (viewingGroup) { + return ( + <> +
    +
    +
    +

    + {viewingGroup.name} +

    + +
    + {viewingGroup.description && ( +

    {viewingGroup.description}

    + )} +
    + +
    +
    +
    + + Members + + +
    + + {membersLoading ? ( +
    + {[1, 2].map((i) => ( +
    +
    + +
    + + +
    +
    +
    + ))} +
    + ) : members.length === 0 ? ( +

    + No members yet. Click "Add" to get started. +

    + ) : ( +
    + {members.map((member) => { + const name = member.userName || 'Unknown' + const avatarInitial = name.charAt(0).toUpperCase() + + return ( +
    +
    + + {member.userImage && } + + {avatarInitial} + + + +
    +
    + + {name} + +
    +
    + {member.userEmail} +
    +
    +
    + + +
    + ) + })} +
    + )} +
    +
    + +
    + +
    +
    + + { + if (!open && hasConfigChanges) { + setShowUnsavedChanges(true) + } else { + setShowConfigModal(open) + if (!open) { + setProviderSearchTerm('') + setIntegrationSearchTerm('') + setPlatformSearchTerm('') + } + } + }} + > + + Configure Permissions + + + Model Providers + Blocks + Platform + + + + +
    +
    +
    + + setProviderSearchTerm(e.target.value)} + className='h-auto flex-1 border-0 bg-transparent p-0 font-base text-[13px] leading-none placeholder:text-[var(--text-tertiary)] focus-visible:ring-0 focus-visible:ring-offset-0' + /> +
    + +
    +
    + {filteredProviders.map((providerId) => { + const ProviderIcon = PROVIDER_DEFINITIONS[providerId]?.icon + const providerName = + PROVIDER_DEFINITIONS[providerId]?.name || + providerId.replace(/-/g, ' ').replace(/\b\w/g, (c) => c.toUpperCase()) + return ( +
    + toggleProvider(providerId)} + /> +
    + {ProviderIcon && } +
    + {providerName} +
    + ) + })} +
    +
    +
    +
    + + + +
    +
    +
    + + setIntegrationSearchTerm(e.target.value)} + className='h-auto flex-1 border-0 bg-transparent p-0 font-base text-[13px] leading-none placeholder:text-[var(--text-tertiary)] focus-visible:ring-0 focus-visible:ring-offset-0' + /> +
    + +
    +
    + {filteredBlocks.map((block) => { + const BlockIcon = block.icon + return ( +
    + toggleIntegration(block.type)} + /> +
    + {BlockIcon && ( + + )} +
    + {block.name} +
    + ) + })} +
    +
    +
    +
    + + + +
    +
    +
    + + setPlatformSearchTerm(e.target.value)} + className='h-auto flex-1 border-0 bg-transparent p-0 font-base text-[13px] leading-none placeholder:text-[var(--text-tertiary)] focus-visible:ring-0 focus-visible:ring-offset-0' + /> +
    + +
    +
    + {Object.entries(platformCategories).map(([category, features]) => ( +
    + + {category} + +
    + {features.map((feature) => ( +
    + + setEditingConfig((prev) => + prev + ? { ...prev, [feature.configKey]: checked !== true } + : prev + ) + } + /> + +
    + ))} +
    +
    + ))} +
    +
    +
    +
    +
    + + + + +
    +
    + + + + Unsaved Changes + +

    + You have unsaved changes. Do you want to save them before closing? +

    +
    + + + + +
    +
    + + + + ) + } + + return ( + <> +
    +
    +
    + + setSearchTerm(e.target.value)} + className='h-auto flex-1 border-0 bg-transparent p-0 font-base leading-none placeholder:text-[var(--text-tertiary)] focus-visible:ring-0 focus-visible:ring-offset-0' + /> +
    + +
    + +
    + {filteredGroups.length === 0 && searchTerm.trim() ? ( +
    + No results found matching "{searchTerm}" +
    + ) : permissionGroups.length === 0 ? ( +
    + Click "Create" above to get started +
    + ) : ( +
    + {filteredGroups.map((group) => ( +
    +
    + {group.name} + + {group.memberCount} member{group.memberCount !== 1 ? 's' : ''} + +
    +
    + + +
    +
    + ))} +
    + )} +
    +
    + + + + Create Permission Group + +
    +
    + + { + setNewGroupName(e.target.value) + if (createError) setCreateError(null) + }} + placeholder='e.g., Marketing Team' + /> +
    +
    + + setNewGroupDescription(e.target.value)} + placeholder='e.g., Limited access for marketing users' + /> +
    + {createError &&

    {createError}

    } +
    +
    + + + + +
    +
    + + setDeletingGroup(null)}> + + Delete Permission Group + +

    + Are you sure you want to delete{' '} + {deletingGroup?.name}? + All members will be removed from this group.{' '} + This action cannot be undone. +

    +
    + + + + +
    +
    + + ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/api-keys/api-keys.tsx b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/api-keys/api-keys.tsx index 9dba88d0fe..95b1346b83 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/api-keys/api-keys.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/api-keys/api-keys.tsx @@ -631,7 +631,7 @@ export function ApiKeys({ onOpenChange, registerCloseHandler }: ApiKeysProps) { Cancel
    {existingKey ? (
    -
    -
    - -
    - ) -} diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/mcp/components/index.ts b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/mcp/components/index.ts index 5862dd823b..a044207bda 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/mcp/components/index.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/mcp/components/index.ts @@ -1,12 +1,2 @@ export { FormField } from './form-field/form-field' -export { FormattedInput } from './formatted-input/formatted-input' -export { HeaderRow } from './header-row/header-row' export { McpServerSkeleton } from './mcp-server-skeleton/mcp-server-skeleton' -export { formatTransportLabel, ServerListItem } from './server-list-item/server-list-item' -export type { - EnvVarDropdownConfig, - HeaderEntry, - InputFieldType, - McpServerFormData, - McpServerTestResult, -} from './types' diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/mcp/components/server-list-item/server-list-item.tsx b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/mcp/components/server-list-item/server-list-item.tsx deleted file mode 100644 index c5ad6f8098..0000000000 --- a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/mcp/components/server-list-item/server-list-item.tsx +++ /dev/null @@ -1,76 +0,0 @@ -import { Button } from '@/components/emcn' - -export function formatTransportLabel(transport: string): string { - return transport - .split('-') - .map((word) => - ['http', 'sse', 'stdio'].includes(word.toLowerCase()) - ? 
word.toUpperCase() - : word.charAt(0).toUpperCase() + word.slice(1) - ) - .join('-') -} - -function formatToolsLabel(tools: any[], connectionStatus?: string): string { - if (connectionStatus === 'error') { - return 'Unable to connect' - } - const count = tools.length - const plural = count !== 1 ? 's' : '' - const names = count > 0 ? `: ${tools.map((t) => t.name).join(', ')}` : '' - return `${count} tool${plural}${names}` -} - -interface ServerListItemProps { - server: any - tools: any[] - isDeleting: boolean - isLoadingTools?: boolean - isRefreshing?: boolean - onRemove: () => void - onViewDetails: () => void -} - -export function ServerListItem({ - server, - tools, - isDeleting, - isLoadingTools = false, - isRefreshing = false, - onRemove, - onViewDetails, -}: ServerListItemProps) { - const transportLabel = formatTransportLabel(server.transport || 'http') - const toolsLabel = formatToolsLabel(tools, server.connectionStatus) - const isError = server.connectionStatus === 'error' - - return ( -
    -
    -
    - - {server.name || 'Unnamed Server'} - - ({transportLabel}) -
    -

    - {isRefreshing - ? 'Refreshing...' - : isLoadingTools && tools.length === 0 - ? 'Loading...' - : toolsLabel} -

    -
    -
    - - -
    -
    - ) -} diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/mcp/components/types.ts b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/mcp/components/types.ts deleted file mode 100644 index 5b1bfd90d9..0000000000 --- a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/mcp/components/types.ts +++ /dev/null @@ -1,35 +0,0 @@ -import type { McpTransport } from '@/lib/mcp/types' - -/** - * Represents a single header entry in the form. - * Using an array of objects allows duplicate keys during editing. - */ -export interface HeaderEntry { - key: string - value: string -} - -export interface McpServerFormData { - name: string - transport: McpTransport - url?: string - timeout?: number - headers?: HeaderEntry[] -} - -export interface McpServerTestResult { - success: boolean - message?: string - error?: string - warnings?: string[] -} - -export type InputFieldType = 'url' | 'header-key' | 'header-value' - -export interface EnvVarDropdownConfig { - searchTerm: string - cursorPosition: number - workspaceId: string - onSelect: (value: string) => void - onClose: () => void -} diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/mcp/mcp.tsx b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/mcp/mcp.tsx index beab9c41a5..c4b8aa9d0f 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/mcp/mcp.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/mcp/mcp.tsx @@ -2,7 +2,7 @@ import { useCallback, useEffect, useMemo, useRef, useState } from 'react' import { createLogger } from '@sim/logger' -import { Plus, Search } from 'lucide-react' +import { Plus, Search, X } from 'lucide-react' import { useParams } from 'next/navigation' import { 
Badge, @@ -16,13 +16,19 @@ import { Tooltip, } from '@/components/emcn' import { Input } from '@/components/ui' +import { cn } from '@/lib/core/utils/cn' import { getIssueBadgeLabel, getIssueBadgeVariant, getMcpToolIssue, type McpToolIssue, } from '@/lib/mcp/tool-validation' -import { checkEnvVarTrigger } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/env-var-dropdown' +import type { McpTransport } from '@/lib/mcp/types' +import { + checkEnvVarTrigger, + EnvVarDropdown, +} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/env-var-dropdown' +import { formatDisplayText } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/formatted-text' import { useCreateMcpServer, useDeleteMcpServer, @@ -35,15 +41,41 @@ import { import { useMcpServerTest } from '@/hooks/use-mcp-server-test' import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import { useSubBlockStore } from '@/stores/workflows/subblock/store' -import type { InputFieldType, McpServerFormData, McpServerTestResult } from './components' -import { - FormattedInput, - FormField, - formatTransportLabel, - HeaderRow, - McpServerSkeleton, - ServerListItem, -} from './components' +import { FormField, McpServerSkeleton } from './components' + +/** + * Represents a single header entry in the form. + * Using an array of objects allows duplicate keys during editing. 
+ */ +interface HeaderEntry { + key: string + value: string +} + +interface McpServerFormData { + name: string + transport: McpTransport + url?: string + timeout?: number + headers?: HeaderEntry[] +} + +interface McpServerTestResult { + success: boolean + message?: string + error?: string + warnings?: string[] +} + +type InputFieldType = 'url' | 'header-key' | 'header-value' + +interface EnvVarDropdownConfig { + searchTerm: string + cursorPosition: number + workspaceId: string + onSelect: (value: string) => void + onClose: () => void +} interface McpTool { name: string @@ -71,6 +103,33 @@ const DEFAULT_FORM_DATA: McpServerFormData = { headers: [{ key: '', value: '' }], } +/** + * Formats a transport type string for display. + */ +function formatTransportLabel(transport: string): string { + return transport + .split('-') + .map((word) => + ['http', 'sse', 'stdio'].includes(word.toLowerCase()) + ? word.toUpperCase() + : word.charAt(0).toUpperCase() + word.slice(1) + ) + .join('-') +} + +/** + * Formats a tools list for display in the server list. + */ +function formatToolsLabel(tools: McpTool[], connectionStatus?: string): string { + if (connectionStatus === 'error') { + return 'Unable to connect' + } + const count = tools.length + const plural = count !== 1 ? 's' : '' + const names = count > 0 ? `: ${tools.map((t) => t.name).join(', ')}` : '' + return `${count} tool${plural}${names}` +} + /** * Determines the label for the test connection button based on current state. 
*/ @@ -84,6 +143,198 @@ function getTestButtonLabel( return 'Test Connection' } +interface FormattedInputProps { + ref?: React.RefObject + placeholder: string + value: string + scrollLeft: number + showEnvVars: boolean + envVarProps: EnvVarDropdownConfig + className?: string + onChange: (e: React.ChangeEvent) => void + onScroll: (scrollLeft: number) => void +} + +function FormattedInput({ + ref, + placeholder, + value, + scrollLeft, + showEnvVars, + envVarProps, + className, + onChange, + onScroll, +}: FormattedInputProps) { + const handleScroll = (e: React.UIEvent) => { + onScroll(e.currentTarget.scrollLeft) + } + + return ( +
    + +
    +
    + {formatDisplayText(value)} +
    +
    + {showEnvVars && ( + + )} +
    + ) +} + +interface HeaderRowProps { + header: HeaderEntry + index: number + headerScrollLeft: Record + showEnvVars: boolean + activeInputField: InputFieldType | null + activeHeaderIndex: number | null + envSearchTerm: string + cursorPosition: number + workspaceId: string + onInputChange: (field: InputFieldType, value: string, index?: number) => void + onHeaderScroll: (key: string, scrollLeft: number) => void + onEnvVarSelect: (value: string) => void + onEnvVarClose: () => void + onRemove: () => void +} + +function HeaderRow({ + header, + index, + headerScrollLeft, + showEnvVars, + activeInputField, + activeHeaderIndex, + envSearchTerm, + cursorPosition, + workspaceId, + onInputChange, + onHeaderScroll, + onEnvVarSelect, + onEnvVarClose, + onRemove, +}: HeaderRowProps) { + const isKeyActive = + showEnvVars && activeInputField === 'header-key' && activeHeaderIndex === index + const isValueActive = + showEnvVars && activeInputField === 'header-value' && activeHeaderIndex === index + + const envVarProps: EnvVarDropdownConfig = { + searchTerm: envSearchTerm, + cursorPosition, + workspaceId, + onSelect: onEnvVarSelect, + onClose: onEnvVarClose, + } + + return ( +
    + onInputChange('header-key', e.target.value, index)} + onScroll={(scrollLeft) => onHeaderScroll(`key-${index}`, scrollLeft)} + /> + + onInputChange('header-value', e.target.value, index)} + onScroll={(scrollLeft) => onHeaderScroll(`value-${index}`, scrollLeft)} + /> + + +
    + ) +} + +interface ServerListItemProps { + server: McpServer + tools: McpTool[] + isDeleting: boolean + isLoadingTools?: boolean + isRefreshing?: boolean + onRemove: () => void + onViewDetails: () => void +} + +function ServerListItem({ + server, + tools, + isDeleting, + isLoadingTools = false, + isRefreshing = false, + onRemove, + onViewDetails, +}: ServerListItemProps) { + const transportLabel = formatTransportLabel(server.transport || 'http') + const toolsLabel = formatToolsLabel(tools, server.connectionStatus) + const isError = server.connectionStatus === 'error' + + return ( +
    +
    +
    + + {server.name || 'Unnamed Server'} + + ({transportLabel}) +
    +

    + {isRefreshing + ? 'Refreshing...' + : isLoadingTools && tools.length === 0 + ? 'Loading...' + : toolsLabel} +

    +
    +
    + + +
    +
    + ) +} + interface MCPProps { initialServerId?: string | null } diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/workflow-mcp-servers/workflow-mcp-servers.tsx b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/workflow-mcp-servers/workflow-mcp-servers.tsx index 8736bf59d0..0c1b6a4efe 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/workflow-mcp-servers/workflow-mcp-servers.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/workflow-mcp-servers/workflow-mcp-servers.tsx @@ -208,11 +208,11 @@ function ServerDetailView({ workspaceId, serverId, onBack }: ServerDetailViewPro

    -
    - + ) + } +) + +BrandedButton.displayName = 'BrandedButton' diff --git a/apps/sim/app/(auth)/components/sso-login-button.tsx b/apps/sim/app/(auth)/components/sso-login-button.tsx index 395a8d4541..df758576c2 100644 --- a/apps/sim/app/(auth)/components/sso-login-button.tsx +++ b/apps/sim/app/(auth)/components/sso-login-button.tsx @@ -34,7 +34,7 @@ export function SSOLoginButton({ } const primaryBtnClasses = cn( - primaryClassName || 'auth-button-gradient', + primaryClassName || 'branded-button-gradient', 'flex w-full items-center justify-center gap-2 rounded-[10px] border font-medium text-[15px] text-white transition-all duration-200' ) diff --git a/apps/sim/app/(auth)/components/status-page-layout.tsx b/apps/sim/app/(auth)/components/status-page-layout.tsx new file mode 100644 index 0000000000..d3177b8754 --- /dev/null +++ b/apps/sim/app/(auth)/components/status-page-layout.tsx @@ -0,0 +1,74 @@ +'use client' + +import type { ReactNode } from 'react' +import { inter } from '@/app/_styles/fonts/inter/inter' +import { soehne } from '@/app/_styles/fonts/soehne/soehne' +import AuthBackground from '@/app/(auth)/components/auth-background' +import Nav from '@/app/(landing)/components/nav/nav' +import { SupportFooter } from './support-footer' + +export interface StatusPageLayoutProps { + /** Page title displayed prominently */ + title: string + /** Description text below the title */ + description: string | ReactNode + /** Content to render below the title/description (usually buttons) */ + children?: ReactNode + /** Whether to show the support footer (default: true) */ + showSupportFooter?: boolean + /** Whether to hide the nav bar (useful for embedded forms) */ + hideNav?: boolean +} + +/** + * Unified layout for status/error pages (404, form unavailable, chat error, etc.). + * Uses AuthBackground and Nav for consistent styling with auth pages. 
+ * + * @example + * ```tsx + * + * router.push('/')}>Return to Home + * + * ``` + */ +export function StatusPageLayout({ + title, + description, + children, + showSupportFooter = true, + hideNav = false, +}: StatusPageLayoutProps) { + return ( + +
    + {!hideNav &&
    +
    + ) +} diff --git a/apps/sim/app/(auth)/components/support-footer.tsx b/apps/sim/app/(auth)/components/support-footer.tsx new file mode 100644 index 0000000000..057334ee5f --- /dev/null +++ b/apps/sim/app/(auth)/components/support-footer.tsx @@ -0,0 +1,40 @@ +'use client' + +import { useBrandConfig } from '@/lib/branding/branding' +import { inter } from '@/app/_styles/fonts/inter/inter' + +export interface SupportFooterProps { + /** Position style - 'fixed' for pages without AuthLayout, 'absolute' for pages with AuthLayout */ + position?: 'fixed' | 'absolute' +} + +/** + * Support footer component for auth and status pages. + * Displays a "Need help? Contact support" link using branded support email. + * + * @example + * ```tsx + * // Fixed position (for standalone pages) + * + * + * // Absolute position (for pages using AuthLayout) + * + * ``` + */ +export function SupportFooter({ position = 'fixed' }: SupportFooterProps) { + const brandConfig = useBrandConfig() + + return ( +
    + Need help?{' '} + + Contact support + +
    + ) +} diff --git a/apps/sim/app/(auth)/login/login-form.tsx b/apps/sim/app/(auth)/login/login-form.tsx index 10b2313bfd..c2094755a9 100644 --- a/apps/sim/app/(auth)/login/login-form.tsx +++ b/apps/sim/app/(auth)/login/login-form.tsx @@ -105,7 +105,7 @@ export default function LoginPage({ const [password, setPassword] = useState('') const [passwordErrors, setPasswordErrors] = useState([]) const [showValidationError, setShowValidationError] = useState(false) - const [buttonClass, setButtonClass] = useState('auth-button-gradient') + const [buttonClass, setButtonClass] = useState('branded-button-gradient') const [isButtonHovered, setIsButtonHovered] = useState(false) const [callbackUrl, setCallbackUrl] = useState('/workspace') @@ -146,9 +146,9 @@ export default function LoginPage({ const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim() if (brandAccent && brandAccent !== '#6f3dfa') { - setButtonClass('auth-button-custom') + setButtonClass('branded-button-custom') } else { - setButtonClass('auth-button-gradient') + setButtonClass('branded-button-gradient') } } diff --git a/apps/sim/app/(auth)/reset-password/reset-password-form.tsx b/apps/sim/app/(auth)/reset-password/reset-password-form.tsx index 7f5b8647d5..7212b52d53 100644 --- a/apps/sim/app/(auth)/reset-password/reset-password-form.tsx +++ b/apps/sim/app/(auth)/reset-password/reset-password-form.tsx @@ -27,7 +27,7 @@ export function RequestResetForm({ statusMessage, className, }: RequestResetFormProps) { - const [buttonClass, setButtonClass] = useState('auth-button-gradient') + const [buttonClass, setButtonClass] = useState('branded-button-gradient') const [isButtonHovered, setIsButtonHovered] = useState(false) useEffect(() => { @@ -36,9 +36,9 @@ export function RequestResetForm({ const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim() if (brandAccent && brandAccent !== '#6f3dfa') { - setButtonClass('auth-button-custom') + setButtonClass('branded-button-custom') } 
else { - setButtonClass('auth-button-gradient') + setButtonClass('branded-button-gradient') } } @@ -138,7 +138,7 @@ export function SetNewPasswordForm({ const [validationMessage, setValidationMessage] = useState('') const [showPassword, setShowPassword] = useState(false) const [showConfirmPassword, setShowConfirmPassword] = useState(false) - const [buttonClass, setButtonClass] = useState('auth-button-gradient') + const [buttonClass, setButtonClass] = useState('branded-button-gradient') const [isButtonHovered, setIsButtonHovered] = useState(false) useEffect(() => { @@ -147,9 +147,9 @@ export function SetNewPasswordForm({ const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim() if (brandAccent && brandAccent !== '#6f3dfa') { - setButtonClass('auth-button-custom') + setButtonClass('branded-button-custom') } else { - setButtonClass('auth-button-gradient') + setButtonClass('branded-button-gradient') } } diff --git a/apps/sim/app/(auth)/signup/signup-form.tsx b/apps/sim/app/(auth)/signup/signup-form.tsx index 108e964909..670d4434b0 100644 --- a/apps/sim/app/(auth)/signup/signup-form.tsx +++ b/apps/sim/app/(auth)/signup/signup-form.tsx @@ -95,7 +95,7 @@ function SignupFormContent({ const [showEmailValidationError, setShowEmailValidationError] = useState(false) const [redirectUrl, setRedirectUrl] = useState('') const [isInviteFlow, setIsInviteFlow] = useState(false) - const [buttonClass, setButtonClass] = useState('auth-button-gradient') + const [buttonClass, setButtonClass] = useState('branded-button-gradient') const [isButtonHovered, setIsButtonHovered] = useState(false) const [name, setName] = useState('') @@ -132,9 +132,9 @@ function SignupFormContent({ const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim() if (brandAccent && brandAccent !== '#6f3dfa') { - setButtonClass('auth-button-custom') + setButtonClass('branded-button-custom') } else { - setButtonClass('auth-button-gradient') + 
setButtonClass('branded-button-gradient') } } diff --git a/apps/sim/app/(auth)/sso/sso-form.tsx b/apps/sim/app/(auth)/sso/sso-form.tsx index 4d01ebd0b1..0d371bbaff 100644 --- a/apps/sim/app/(auth)/sso/sso-form.tsx +++ b/apps/sim/app/(auth)/sso/sso-form.tsx @@ -57,7 +57,7 @@ export default function SSOForm() { const [email, setEmail] = useState('') const [emailErrors, setEmailErrors] = useState([]) const [showEmailValidationError, setShowEmailValidationError] = useState(false) - const [buttonClass, setButtonClass] = useState('auth-button-gradient') + const [buttonClass, setButtonClass] = useState('branded-button-gradient') const [callbackUrl, setCallbackUrl] = useState('/workspace') useEffect(() => { @@ -96,9 +96,9 @@ export default function SSOForm() { const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim() if (brandAccent && brandAccent !== '#6f3dfa') { - setButtonClass('auth-button-custom') + setButtonClass('branded-button-custom') } else { - setButtonClass('auth-button-gradient') + setButtonClass('branded-button-gradient') } } diff --git a/apps/sim/app/(auth)/verify/verify-content.tsx b/apps/sim/app/(auth)/verify/verify-content.tsx index 7259205bc8..ed05354b94 100644 --- a/apps/sim/app/(auth)/verify/verify-content.tsx +++ b/apps/sim/app/(auth)/verify/verify-content.tsx @@ -58,7 +58,7 @@ function VerificationForm({ setCountdown(30) } - const [buttonClass, setButtonClass] = useState('auth-button-gradient') + const [buttonClass, setButtonClass] = useState('branded-button-gradient') useEffect(() => { const checkCustomBrand = () => { @@ -66,9 +66,9 @@ function VerificationForm({ const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim() if (brandAccent && brandAccent !== '#6f3dfa') { - setButtonClass('auth-button-custom') + setButtonClass('branded-button-custom') } else { - setButtonClass('auth-button-gradient') + setButtonClass('branded-button-gradient') } } diff --git a/apps/sim/app/(landing)/studio/head.tsx 
b/apps/sim/app/(landing)/studio/head.tsx deleted file mode 100644 index c528800775..0000000000 --- a/apps/sim/app/(landing)/studio/head.tsx +++ /dev/null @@ -1,13 +0,0 @@ -export default function Head() { - return ( - <> - - - - ) -} diff --git a/apps/sim/app/_shell/providers/theme-provider.tsx b/apps/sim/app/_shell/providers/theme-provider.tsx index dae3071b5c..6b3c7f315e 100644 --- a/apps/sim/app/_shell/providers/theme-provider.tsx +++ b/apps/sim/app/_shell/providers/theme-provider.tsx @@ -22,12 +22,13 @@ export function ThemeProvider({ children, ...props }: ThemeProviderProps) { pathname.startsWith('/changelog') || pathname.startsWith('/chat') || pathname.startsWith('/studio') || - pathname.startsWith('/resume') + pathname.startsWith('/resume') || + pathname.startsWith('/form') return ( { validateAuthToken: vi.fn().mockReturnValue(true), })) - vi.doMock('@sim/logger', () => ({ - createLogger: vi.fn().mockReturnValue({ - debug: vi.fn(), - info: vi.fn(), - warn: vi.fn(), - error: vi.fn(), - }), - })) + // Mock logger - use loggerMock from @sim/testing + vi.doMock('@sim/logger', () => loggerMock) vi.doMock('@sim/db', () => { const mockSelect = vi.fn().mockImplementation((fields) => { diff --git a/apps/sim/app/api/chat/[identifier]/route.ts b/apps/sim/app/api/chat/[identifier]/route.ts index ac9a1c3206..57041c4cc5 100644 --- a/apps/sim/app/api/chat/[identifier]/route.ts +++ b/apps/sim/app/api/chat/[identifier]/route.ts @@ -5,16 +5,12 @@ import { createLogger } from '@sim/logger' import { eq } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' +import { addCorsHeaders, validateAuthToken } from '@/lib/core/security/deployment' import { generateRequestId } from '@/lib/core/utils/request' import { preprocessExecution } from '@/lib/execution/preprocessing' import { LoggingSession } from '@/lib/logs/execution/logging-session' import { ChatFiles } from '@/lib/uploads' -import { - addCorsHeaders, - setChatAuthCookie, - 
validateAuthToken, - validateChatAuth, -} from '@/app/api/chat/utils' +import { setChatAuthCookie, validateChatAuth } from '@/app/api/chat/utils' import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils' const logger = createLogger('ChatIdentifierAPI') diff --git a/apps/sim/app/api/chat/manage/[id]/route.test.ts b/apps/sim/app/api/chat/manage/[id]/route.test.ts index 1be5f483b2..12e6b01a9c 100644 --- a/apps/sim/app/api/chat/manage/[id]/route.test.ts +++ b/apps/sim/app/api/chat/manage/[id]/route.test.ts @@ -1,9 +1,10 @@ -import { NextRequest } from 'next/server' /** * Tests for chat edit API route * * @vitest-environment node */ +import { loggerMock } from '@sim/testing' +import { NextRequest } from 'next/server' import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' vi.mock('@/lib/core/config/feature-flags', () => ({ @@ -50,14 +51,8 @@ describe('Chat Edit API Route', () => { chat: { id: 'id', identifier: 'identifier', userId: 'userId' }, })) - vi.doMock('@sim/logger', () => ({ - createLogger: vi.fn().mockReturnValue({ - info: vi.fn(), - error: vi.fn(), - warn: vi.fn(), - debug: vi.fn(), - }), - })) + // Mock logger - use loggerMock from @sim/testing + vi.doMock('@sim/logger', () => loggerMock) vi.doMock('@/app/api/workflows/utils', () => ({ createSuccessResponse: mockCreateSuccessResponse.mockImplementation((data) => { diff --git a/apps/sim/app/api/chat/utils.test.ts b/apps/sim/app/api/chat/utils.test.ts index 70d92990b4..b6678fb53e 100644 --- a/apps/sim/app/api/chat/utils.test.ts +++ b/apps/sim/app/api/chat/utils.test.ts @@ -1,3 +1,4 @@ +import { databaseMock, loggerMock } from '@sim/testing' import type { NextResponse } from 'next/server' /** * Tests for chat API utils @@ -5,14 +6,9 @@ import type { NextResponse } from 'next/server' * @vitest-environment node */ import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' -import { env } from '@/lib/core/config/env' -vi.mock('@sim/db', () => ({ - db: { - 
select: vi.fn(), - update: vi.fn(), - }, -})) +vi.mock('@sim/db', () => databaseMock) +vi.mock('@sim/logger', () => loggerMock) vi.mock('@/lib/logs/execution/logging-session', () => ({ LoggingSession: vi.fn().mockImplementation(() => ({ @@ -52,19 +48,10 @@ vi.mock('@/lib/core/config/feature-flags', () => ({ describe('Chat API Utils', () => { beforeEach(() => { - vi.doMock('@sim/logger', () => ({ - createLogger: vi.fn().mockReturnValue({ - info: vi.fn(), - error: vi.fn(), - warn: vi.fn(), - debug: vi.fn(), - }), - })) - vi.stubGlobal('process', { ...process, env: { - ...env, + ...process.env, NODE_ENV: 'development', }, }) @@ -75,8 +62,8 @@ describe('Chat API Utils', () => { }) describe('Auth token utils', () => { - it('should validate auth tokens', async () => { - const { validateAuthToken } = await import('@/app/api/chat/utils') + it.concurrent('should validate auth tokens', async () => { + const { validateAuthToken } = await import('@/lib/core/security/deployment') const chatId = 'test-chat-id' const type = 'password' @@ -92,8 +79,8 @@ describe('Chat API Utils', () => { expect(isInvalidChat).toBe(false) }) - it('should reject expired tokens', async () => { - const { validateAuthToken } = await import('@/app/api/chat/utils') + it.concurrent('should reject expired tokens', async () => { + const { validateAuthToken } = await import('@/lib/core/security/deployment') const chatId = 'test-chat-id' const expiredToken = Buffer.from( @@ -136,7 +123,7 @@ describe('Chat API Utils', () => { describe('CORS handling', () => { it('should add CORS headers for localhost in development', async () => { - const { addCorsHeaders } = await import('@/app/api/chat/utils') + const { addCorsHeaders } = await import('@/lib/core/security/deployment') const mockRequest = { headers: { @@ -343,7 +330,7 @@ describe('Chat API Utils', () => { }) describe('Execution Result Processing', () => { - it('should process logs regardless of overall success status', () => { + it.concurrent('should process 
logs regardless of overall success status', () => { const executionResult = { success: false, output: {}, @@ -381,7 +368,7 @@ describe('Chat API Utils', () => { expect(executionResult.logs[1].error).toBe('Agent 2 failed') }) - it('should handle ExecutionResult vs StreamingExecution types correctly', () => { + it.concurrent('should handle ExecutionResult vs StreamingExecution types correctly', () => { const executionResult = { success: true, output: { content: 'test' }, diff --git a/apps/sim/app/api/chat/utils.ts b/apps/sim/app/api/chat/utils.ts index 712886a2ff..654c36c8ba 100644 --- a/apps/sim/app/api/chat/utils.ts +++ b/apps/sim/app/api/chat/utils.ts @@ -1,17 +1,25 @@ -import { createHash } from 'crypto' import { db } from '@sim/db' import { chat, workflow } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { eq } from 'drizzle-orm' import type { NextRequest, NextResponse } from 'next/server' -import { isDev } from '@/lib/core/config/feature-flags' +import { + isEmailAllowed, + setDeploymentAuthCookie, + validateAuthToken, +} from '@/lib/core/security/deployment' import { decryptSecret } from '@/lib/core/security/encryption' import { hasAdminPermission } from '@/lib/workspaces/permissions/utils' const logger = createLogger('ChatAuthUtils') -function hashPassword(encryptedPassword: string): string { - return createHash('sha256').update(encryptedPassword).digest('hex').substring(0, 8) +export function setChatAuthCookie( + response: NextResponse, + chatId: string, + type: string, + encryptedPassword?: string | null +): void { + setDeploymentAuthCookie(response, 'chat', chatId, type, encryptedPassword) } /** @@ -82,77 +90,6 @@ export async function checkChatAccess( return { hasAccess: false } } -function encryptAuthToken(chatId: string, type: string, encryptedPassword?: string | null): string { - const pwHash = encryptedPassword ? 
hashPassword(encryptedPassword) : '' - return Buffer.from(`${chatId}:${type}:${Date.now()}:${pwHash}`).toString('base64') -} - -export function validateAuthToken( - token: string, - chatId: string, - encryptedPassword?: string | null -): boolean { - try { - const decoded = Buffer.from(token, 'base64').toString() - const parts = decoded.split(':') - const [storedId, _type, timestamp, storedPwHash] = parts - - if (storedId !== chatId) { - return false - } - - const createdAt = Number.parseInt(timestamp) - const now = Date.now() - const expireTime = 24 * 60 * 60 * 1000 - - if (now - createdAt > expireTime) { - return false - } - - if (encryptedPassword) { - const currentPwHash = hashPassword(encryptedPassword) - if (storedPwHash !== currentPwHash) { - return false - } - } - - return true - } catch (_e) { - return false - } -} - -export function setChatAuthCookie( - response: NextResponse, - chatId: string, - type: string, - encryptedPassword?: string | null -): void { - const token = encryptAuthToken(chatId, type, encryptedPassword) - response.cookies.set({ - name: `chat_auth_${chatId}`, - value: token, - httpOnly: true, - secure: !isDev, - sameSite: 'lax', - path: '/', - maxAge: 60 * 60 * 24, - }) -} - -export function addCorsHeaders(response: NextResponse, request: NextRequest) { - const origin = request.headers.get('origin') || '' - - if (isDev && origin.includes('localhost')) { - response.headers.set('Access-Control-Allow-Origin', origin) - response.headers.set('Access-Control-Allow-Credentials', 'true') - response.headers.set('Access-Control-Allow-Methods', 'GET, POST, OPTIONS') - response.headers.set('Access-Control-Allow-Headers', 'Content-Type, X-Requested-With') - } - - return response -} - export async function validateChatAuth( requestId: string, deployment: any, @@ -231,12 +168,7 @@ export async function validateChatAuth( const allowedEmails = deployment.allowedEmails || [] - if (allowedEmails.includes(email)) { - return { authorized: false, error: 
'otp_required' } - } - - const domain = email.split('@')[1] - if (domain && allowedEmails.some((allowed: string) => allowed === `@${domain}`)) { + if (isEmailAllowed(email, allowedEmails)) { return { authorized: false, error: 'otp_required' } } @@ -270,12 +202,7 @@ export async function validateChatAuth( const allowedEmails = deployment.allowedEmails || [] - if (allowedEmails.includes(email)) { - return { authorized: true } - } - - const domain = email.split('@')[1] - if (domain && allowedEmails.some((allowed: string) => allowed === `@${domain}`)) { + if (isEmailAllowed(email, allowedEmails)) { return { authorized: true } } @@ -296,12 +223,7 @@ export async function validateChatAuth( const allowedEmails = deployment.allowedEmails || [] - if (allowedEmails.includes(userEmail)) { - return { authorized: true } - } - - const domain = userEmail.split('@')[1] - if (domain && allowedEmails.some((allowed: string) => allowed === `@${domain}`)) { + if (isEmailAllowed(userEmail, allowedEmails)) { return { authorized: true } } diff --git a/apps/sim/app/api/environment/route.ts b/apps/sim/app/api/environment/route.ts index 5e7fa4006e..ad2818b0d1 100644 --- a/apps/sim/app/api/environment/route.ts +++ b/apps/sim/app/api/environment/route.ts @@ -7,7 +7,7 @@ import { z } from 'zod' import { getSession } from '@/lib/auth' import { decryptSecret, encryptSecret } from '@/lib/core/security/encryption' import { generateRequestId } from '@/lib/core/utils/request' -import type { EnvironmentVariable } from '@/stores/settings/environment/types' +import type { EnvironmentVariable } from '@/stores/settings/environment' const logger = createLogger('EnvironmentAPI') diff --git a/apps/sim/app/api/form/[identifier]/route.ts b/apps/sim/app/api/form/[identifier]/route.ts new file mode 100644 index 0000000000..bfae3e36e0 --- /dev/null +++ b/apps/sim/app/api/form/[identifier]/route.ts @@ -0,0 +1,414 @@ +import { randomUUID } from 'crypto' +import { db } from '@sim/db' +import { form, workflow, 
workflowBlocks } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { eq } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { addCorsHeaders, validateAuthToken } from '@/lib/core/security/deployment' +import { generateRequestId } from '@/lib/core/utils/request' +import { preprocessExecution } from '@/lib/execution/preprocessing' +import { LoggingSession } from '@/lib/logs/execution/logging-session' +import { createStreamingResponse } from '@/lib/workflows/streaming/streaming' +import { setFormAuthCookie, validateFormAuth } from '@/app/api/form/utils' +import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils' + +const logger = createLogger('FormIdentifierAPI') + +const formPostBodySchema = z.object({ + formData: z.record(z.unknown()).optional(), + password: z.string().optional(), + email: z.string().email('Invalid email format').optional().or(z.literal('')), +}) + +export const dynamic = 'force-dynamic' +export const runtime = 'nodejs' + +/** + * Get the input format schema from the workflow's start block + */ +async function getWorkflowInputSchema(workflowId: string): Promise { + try { + const blocks = await db + .select() + .from(workflowBlocks) + .where(eq(workflowBlocks.workflowId, workflowId)) + + // Find the start block (starter or start_trigger type) + const startBlock = blocks.find( + (block) => block.type === 'starter' || block.type === 'start_trigger' + ) + + if (!startBlock) { + return [] + } + + // Extract inputFormat from subBlocks + const subBlocks = startBlock.subBlocks as Record | null + if (!subBlocks?.inputFormat?.value) { + return [] + } + + return Array.isArray(subBlocks.inputFormat.value) ? 
subBlocks.inputFormat.value : [] + } catch (error) { + logger.error('Error fetching workflow input schema:', error) + return [] + } +} + +export async function POST( + request: NextRequest, + { params }: { params: Promise<{ identifier: string }> } +) { + const { identifier } = await params + const requestId = generateRequestId() + + try { + logger.debug(`[${requestId}] Processing form submission for identifier: ${identifier}`) + + let parsedBody + try { + const rawBody = await request.json() + const validation = formPostBodySchema.safeParse(rawBody) + + if (!validation.success) { + const errorMessage = validation.error.errors + .map((err) => `${err.path.join('.')}: ${err.message}`) + .join(', ') + logger.warn(`[${requestId}] Validation error: ${errorMessage}`) + return addCorsHeaders( + createErrorResponse(`Invalid request body: ${errorMessage}`, 400), + request + ) + } + + parsedBody = validation.data + } catch (_error) { + return addCorsHeaders(createErrorResponse('Invalid request body', 400), request) + } + + const deploymentResult = await db + .select({ + id: form.id, + workflowId: form.workflowId, + userId: form.userId, + isActive: form.isActive, + authType: form.authType, + password: form.password, + allowedEmails: form.allowedEmails, + customizations: form.customizations, + }) + .from(form) + .where(eq(form.identifier, identifier)) + .limit(1) + + if (deploymentResult.length === 0) { + logger.warn(`[${requestId}] Form not found for identifier: ${identifier}`) + return addCorsHeaders(createErrorResponse('Form not found', 404), request) + } + + const deployment = deploymentResult[0] + + if (!deployment.isActive) { + logger.warn(`[${requestId}] Form is not active: ${identifier}`) + + const [workflowRecord] = await db + .select({ workspaceId: workflow.workspaceId }) + .from(workflow) + .where(eq(workflow.id, deployment.workflowId)) + .limit(1) + + const workspaceId = workflowRecord?.workspaceId + if (!workspaceId) { + logger.warn(`[${requestId}] Cannot log: 
workflow ${deployment.workflowId} has no workspace`) + return addCorsHeaders( + createErrorResponse('This form is currently unavailable', 403), + request + ) + } + + const executionId = randomUUID() + const loggingSession = new LoggingSession( + deployment.workflowId, + executionId, + 'form', + requestId + ) + + await loggingSession.safeStart({ + userId: deployment.userId, + workspaceId, + variables: {}, + }) + + await loggingSession.safeCompleteWithError({ + error: { + message: 'This form is currently unavailable. The form has been disabled.', + stackTrace: undefined, + }, + traceSpans: [], + }) + + return addCorsHeaders(createErrorResponse('This form is currently unavailable', 403), request) + } + + const authResult = await validateFormAuth(requestId, deployment, request, parsedBody) + if (!authResult.authorized) { + return addCorsHeaders( + createErrorResponse(authResult.error || 'Authentication required', 401), + request + ) + } + + const { formData, password, email } = parsedBody + + // If only authentication credentials provided (no form data), just return authenticated + if ((password || email) && !formData) { + const response = addCorsHeaders(createSuccessResponse({ authenticated: true }), request) + setFormAuthCookie(response, deployment.id, deployment.authType, deployment.password) + return response + } + + if (!formData || Object.keys(formData).length === 0) { + return addCorsHeaders(createErrorResponse('No form data provided', 400), request) + } + + const executionId = randomUUID() + const loggingSession = new LoggingSession(deployment.workflowId, executionId, 'form', requestId) + + const preprocessResult = await preprocessExecution({ + workflowId: deployment.workflowId, + userId: deployment.userId, + triggerType: 'form', + executionId, + requestId, + checkRateLimit: true, + checkDeployment: true, + loggingSession, + }) + + if (!preprocessResult.success) { + logger.warn(`[${requestId}] Preprocessing failed: ${preprocessResult.error?.message}`) + return 
addCorsHeaders( + createErrorResponse( + preprocessResult.error?.message || 'Failed to process request', + preprocessResult.error?.statusCode || 500 + ), + request + ) + } + + const { actorUserId, workflowRecord } = preprocessResult + const workspaceOwnerId = actorUserId! + const workspaceId = workflowRecord?.workspaceId + if (!workspaceId) { + logger.error(`[${requestId}] Workflow ${deployment.workflowId} has no workspaceId`) + return addCorsHeaders( + createErrorResponse('Workflow has no associated workspace', 500), + request + ) + } + + try { + const workflowForExecution = { + id: deployment.workflowId, + userId: deployment.userId, + workspaceId, + isDeployed: workflowRecord?.isDeployed ?? false, + variables: (workflowRecord?.variables ?? {}) as Record, + } + + // Pass form data as the workflow input + const workflowInput = { + input: formData, + ...formData, // Spread form fields at top level for convenience + } + + // Execute workflow using streaming (for consistency with chat) + const stream = await createStreamingResponse({ + requestId, + workflow: workflowForExecution, + input: workflowInput, + executingUserId: workspaceOwnerId, + streamConfig: { + selectedOutputs: [], + isSecureMode: true, + workflowTriggerType: 'api', // Use 'api' type since form is similar + }, + executionId, + }) + + // For forms, we don't stream back - we wait for completion and return success + // Consume the stream to wait for completion + const reader = stream.getReader() + let lastOutput: any = null + + try { + while (true) { + const { done, value } = await reader.read() + if (done) break + + // Parse SSE data if present + const text = new TextDecoder().decode(value) + const lines = text.split('\n') + for (const line of lines) { + if (line.startsWith('data: ')) { + try { + const data = JSON.parse(line.slice(6)) + if (data.type === 'complete' || data.output) { + lastOutput = data.output || data + } + } catch { + // Ignore parse errors + } + } + } + } + } finally { + 
reader.releaseLock() + } + + logger.info(`[${requestId}] Form submission successful for ${identifier}`) + + // Return success with customizations for thank you screen + const customizations = deployment.customizations as Record | null + return addCorsHeaders( + createSuccessResponse({ + success: true, + executionId, + thankYouTitle: customizations?.thankYouTitle || 'Thank you!', + thankYouMessage: + customizations?.thankYouMessage || 'Your response has been submitted successfully.', + }), + request + ) + } catch (error: any) { + logger.error(`[${requestId}] Error processing form submission:`, error) + return addCorsHeaders( + createErrorResponse(error.message || 'Failed to process form submission', 500), + request + ) + } + } catch (error: any) { + logger.error(`[${requestId}] Error processing form submission:`, error) + return addCorsHeaders( + createErrorResponse(error.message || 'Failed to process form submission', 500), + request + ) + } +} + +export async function GET( + request: NextRequest, + { params }: { params: Promise<{ identifier: string }> } +) { + const { identifier } = await params + const requestId = generateRequestId() + + try { + logger.debug(`[${requestId}] Fetching form info for identifier: ${identifier}`) + + const deploymentResult = await db + .select({ + id: form.id, + title: form.title, + description: form.description, + customizations: form.customizations, + isActive: form.isActive, + workflowId: form.workflowId, + authType: form.authType, + password: form.password, + allowedEmails: form.allowedEmails, + showBranding: form.showBranding, + }) + .from(form) + .where(eq(form.identifier, identifier)) + .limit(1) + + if (deploymentResult.length === 0) { + logger.warn(`[${requestId}] Form not found for identifier: ${identifier}`) + return addCorsHeaders(createErrorResponse('Form not found', 404), request) + } + + const deployment = deploymentResult[0] + + if (!deployment.isActive) { + logger.warn(`[${requestId}] Form is not active: 
${identifier}`) + return addCorsHeaders(createErrorResponse('This form is currently unavailable', 403), request) + } + + // Get the workflow's input schema + const inputSchema = await getWorkflowInputSchema(deployment.workflowId) + + const cookieName = `form_auth_${deployment.id}` + const authCookie = request.cookies.get(cookieName) + + // If authenticated (via cookie), return full form config + if ( + deployment.authType !== 'public' && + authCookie && + validateAuthToken(authCookie.value, deployment.id, deployment.password) + ) { + return addCorsHeaders( + createSuccessResponse({ + id: deployment.id, + title: deployment.title, + description: deployment.description, + customizations: deployment.customizations, + authType: deployment.authType, + showBranding: deployment.showBranding, + inputSchema, + }), + request + ) + } + + // Check authentication requirement + const authResult = await validateFormAuth(requestId, deployment, request) + if (!authResult.authorized) { + // Return limited info for auth required forms + logger.info( + `[${requestId}] Authentication required for form: ${identifier}, type: ${deployment.authType}` + ) + return addCorsHeaders( + NextResponse.json( + { + success: false, + error: authResult.error || 'Authentication required', + authType: deployment.authType, + title: deployment.title, + customizations: { + primaryColor: (deployment.customizations as any)?.primaryColor, + logoUrl: (deployment.customizations as any)?.logoUrl, + }, + }, + { status: 401 } + ), + request + ) + } + + return addCorsHeaders( + createSuccessResponse({ + id: deployment.id, + title: deployment.title, + description: deployment.description, + customizations: deployment.customizations, + authType: deployment.authType, + showBranding: deployment.showBranding, + inputSchema, + }), + request + ) + } catch (error: any) { + logger.error(`[${requestId}] Error fetching form info:`, error) + return addCorsHeaders( + createErrorResponse(error.message || 'Failed to fetch form 
information', 500), + request + ) + } +} + +export async function OPTIONS(request: NextRequest) { + return addCorsHeaders(new NextResponse(null, { status: 204 }), request) +} diff --git a/apps/sim/app/api/form/manage/[id]/route.ts b/apps/sim/app/api/form/manage/[id]/route.ts new file mode 100644 index 0000000000..f2f1cbd1fb --- /dev/null +++ b/apps/sim/app/api/form/manage/[id]/route.ts @@ -0,0 +1,233 @@ +import { db } from '@sim/db' +import { form } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { eq } from 'drizzle-orm' +import type { NextRequest } from 'next/server' +import { z } from 'zod' +import { getSession } from '@/lib/auth' +import { encryptSecret } from '@/lib/core/security/encryption' +import { checkFormAccess, DEFAULT_FORM_CUSTOMIZATIONS } from '@/app/api/form/utils' +import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils' + +const logger = createLogger('FormManageAPI') + +const fieldConfigSchema = z.object({ + name: z.string(), + type: z.string(), + label: z.string(), + description: z.string().optional(), + required: z.boolean().optional(), +}) + +const updateFormSchema = z.object({ + identifier: z + .string() + .min(1, 'Identifier is required') + .max(100, 'Identifier must be 100 characters or less') + .regex(/^[a-z0-9-]+$/, 'Identifier can only contain lowercase letters, numbers, and hyphens') + .optional(), + title: z + .string() + .min(1, 'Title is required') + .max(200, 'Title must be 200 characters or less') + .optional(), + description: z.string().max(1000, 'Description must be 1000 characters or less').optional(), + customizations: z + .object({ + primaryColor: z.string().optional(), + welcomeMessage: z + .string() + .max(500, 'Welcome message must be 500 characters or less') + .optional(), + thankYouTitle: z + .string() + .max(100, 'Thank you title must be 100 characters or less') + .optional(), + thankYouMessage: z + .string() + .max(500, 'Thank you message must be 500 characters or 
less') + .optional(), + logoUrl: z.string().url('Logo URL must be a valid URL').optional().or(z.literal('')), + fieldConfigs: z.array(fieldConfigSchema).optional(), + }) + .optional(), + authType: z.enum(['public', 'password', 'email']).optional(), + password: z + .string() + .min(6, 'Password must be at least 6 characters') + .optional() + .or(z.literal('')), + allowedEmails: z.array(z.string()).optional(), + showBranding: z.boolean().optional(), + isActive: z.boolean().optional(), +}) + +export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) { + try { + const session = await getSession() + + if (!session) { + return createErrorResponse('Unauthorized', 401) + } + + const { id } = await params + + const { hasAccess, form: formRecord } = await checkFormAccess(id, session.user.id) + + if (!hasAccess || !formRecord) { + return createErrorResponse('Form not found or access denied', 404) + } + + const { password: _password, ...formWithoutPassword } = formRecord + + return createSuccessResponse({ + form: { + ...formWithoutPassword, + hasPassword: !!formRecord.password, + }, + }) + } catch (error: any) { + logger.error('Error fetching form:', error) + return createErrorResponse(error.message || 'Failed to fetch form', 500) + } +} + +export async function PATCH(request: NextRequest, { params }: { params: Promise<{ id: string }> }) { + try { + const session = await getSession() + + if (!session) { + return createErrorResponse('Unauthorized', 401) + } + + const { id } = await params + + const { hasAccess, form: formRecord } = await checkFormAccess(id, session.user.id) + + if (!hasAccess || !formRecord) { + return createErrorResponse('Form not found or access denied', 404) + } + + const body = await request.json() + + try { + const validatedData = updateFormSchema.parse(body) + + const { + identifier, + title, + description, + customizations, + authType, + password, + allowedEmails, + showBranding, + isActive, + } = validatedData + + 
if (identifier && identifier !== formRecord.identifier) { + const existingIdentifier = await db + .select() + .from(form) + .where(eq(form.identifier, identifier)) + .limit(1) + + if (existingIdentifier.length > 0) { + return createErrorResponse('Identifier already in use', 400) + } + } + + if (authType === 'password' && !password && !formRecord.password) { + return createErrorResponse('Password is required when using password protection', 400) + } + + if ( + authType === 'email' && + (!allowedEmails || allowedEmails.length === 0) && + (!formRecord.allowedEmails || (formRecord.allowedEmails as string[]).length === 0) + ) { + return createErrorResponse( + 'At least one email or domain is required when using email access control', + 400 + ) + } + + const updateData: Record = { + updatedAt: new Date(), + } + + if (identifier !== undefined) updateData.identifier = identifier + if (title !== undefined) updateData.title = title + if (description !== undefined) updateData.description = description + if (showBranding !== undefined) updateData.showBranding = showBranding + if (isActive !== undefined) updateData.isActive = isActive + if (authType !== undefined) updateData.authType = authType + if (allowedEmails !== undefined) updateData.allowedEmails = allowedEmails + + if (customizations !== undefined) { + const existingCustomizations = (formRecord.customizations as Record) || {} + updateData.customizations = { + ...DEFAULT_FORM_CUSTOMIZATIONS, + ...existingCustomizations, + ...customizations, + } + } + + if (password) { + const { encrypted } = await encryptSecret(password) + updateData.password = encrypted + } else if (authType && authType !== 'password') { + updateData.password = null + } + + await db.update(form).set(updateData).where(eq(form.id, id)) + + logger.info(`Form ${id} updated successfully`) + + return createSuccessResponse({ + message: 'Form updated successfully', + }) + } catch (validationError) { + if (validationError instanceof z.ZodError) { + const 
errorMessage = validationError.errors[0]?.message || 'Invalid request data' + return createErrorResponse(errorMessage, 400, 'VALIDATION_ERROR') + } + throw validationError + } + } catch (error: any) { + logger.error('Error updating form:', error) + return createErrorResponse(error.message || 'Failed to update form', 500) + } +} + +export async function DELETE( + request: NextRequest, + { params }: { params: Promise<{ id: string }> } +) { + try { + const session = await getSession() + + if (!session) { + return createErrorResponse('Unauthorized', 401) + } + + const { id } = await params + + const { hasAccess, form: formRecord } = await checkFormAccess(id, session.user.id) + + if (!hasAccess || !formRecord) { + return createErrorResponse('Form not found or access denied', 404) + } + + await db.update(form).set({ isActive: false, updatedAt: new Date() }).where(eq(form.id, id)) + + logger.info(`Form ${id} deleted (soft delete)`) + + return createSuccessResponse({ + message: 'Form deleted successfully', + }) + } catch (error: any) { + logger.error('Error deleting form:', error) + return createErrorResponse(error.message || 'Failed to delete form', 500) + } +} diff --git a/apps/sim/app/api/form/route.ts b/apps/sim/app/api/form/route.ts new file mode 100644 index 0000000000..ada13f5ee1 --- /dev/null +++ b/apps/sim/app/api/form/route.ts @@ -0,0 +1,214 @@ +import { db } from '@sim/db' +import { form } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { eq } from 'drizzle-orm' +import type { NextRequest } from 'next/server' +import { v4 as uuidv4 } from 'uuid' +import { z } from 'zod' +import { getSession } from '@/lib/auth' +import { isDev } from '@/lib/core/config/feature-flags' +import { encryptSecret } from '@/lib/core/security/encryption' +import { getEmailDomain } from '@/lib/core/utils/urls' +import { deployWorkflow } from '@/lib/workflows/persistence/utils' +import { + checkWorkflowAccessForFormCreation, + DEFAULT_FORM_CUSTOMIZATIONS, +} from 
'@/app/api/form/utils' +import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils' + +const logger = createLogger('FormAPI') + +const fieldConfigSchema = z.object({ + name: z.string(), + type: z.string(), + label: z.string(), + description: z.string().optional(), + required: z.boolean().optional(), +}) + +const formSchema = z.object({ + workflowId: z.string().min(1, 'Workflow ID is required'), + identifier: z + .string() + .min(1, 'Identifier is required') + .max(100, 'Identifier must be 100 characters or less') + .regex(/^[a-z0-9-]+$/, 'Identifier can only contain lowercase letters, numbers, and hyphens'), + title: z.string().min(1, 'Title is required').max(200, 'Title must be 200 characters or less'), + description: z.string().max(1000, 'Description must be 1000 characters or less').optional(), + customizations: z + .object({ + primaryColor: z.string().optional(), + welcomeMessage: z + .string() + .max(500, 'Welcome message must be 500 characters or less') + .optional(), + thankYouTitle: z + .string() + .max(100, 'Thank you title must be 100 characters or less') + .optional(), + thankYouMessage: z + .string() + .max(500, 'Thank you message must be 500 characters or less') + .optional(), + logoUrl: z.string().url('Logo URL must be a valid URL').optional().or(z.literal('')), + fieldConfigs: z.array(fieldConfigSchema).optional(), + }) + .optional(), + authType: z.enum(['public', 'password', 'email']).default('public'), + password: z + .string() + .min(6, 'Password must be at least 6 characters') + .optional() + .or(z.literal('')), + allowedEmails: z.array(z.string()).optional().default([]), + showBranding: z.boolean().optional().default(true), +}) + +export async function GET(request: NextRequest) { + try { + const session = await getSession() + + if (!session) { + return createErrorResponse('Unauthorized', 401) + } + + const deployments = await db.select().from(form).where(eq(form.userId, session.user.id)) + + return 
createSuccessResponse({ deployments }) + } catch (error: any) { + logger.error('Error fetching form deployments:', error) + return createErrorResponse(error.message || 'Failed to fetch form deployments', 500) + } +} + +export async function POST(request: NextRequest) { + try { + const session = await getSession() + + if (!session) { + return createErrorResponse('Unauthorized', 401) + } + + const body = await request.json() + + try { + const validatedData = formSchema.parse(body) + + const { + workflowId, + identifier, + title, + description = '', + customizations, + authType = 'public', + password, + allowedEmails = [], + showBranding = true, + } = validatedData + + if (authType === 'password' && !password) { + return createErrorResponse('Password is required when using password protection', 400) + } + + if (authType === 'email' && (!Array.isArray(allowedEmails) || allowedEmails.length === 0)) { + return createErrorResponse( + 'At least one email or domain is required when using email access control', + 400 + ) + } + + const existingIdentifier = await db + .select() + .from(form) + .where(eq(form.identifier, identifier)) + .limit(1) + + if (existingIdentifier.length > 0) { + return createErrorResponse('Identifier already in use', 400) + } + + const { hasAccess, workflow: workflowRecord } = await checkWorkflowAccessForFormCreation( + workflowId, + session.user.id + ) + + if (!hasAccess || !workflowRecord) { + return createErrorResponse('Workflow not found or access denied', 404) + } + + const result = await deployWorkflow({ + workflowId, + deployedBy: session.user.id, + }) + + if (!result.success) { + return createErrorResponse(result.error || 'Failed to deploy workflow', 500) + } + + logger.info( + `${workflowRecord.isDeployed ? 
'Redeployed' : 'Auto-deployed'} workflow ${workflowId} for form (v${result.version})` + ) + + let encryptedPassword = null + if (authType === 'password' && password) { + const { encrypted } = await encryptSecret(password) + encryptedPassword = encrypted + } + + const id = uuidv4() + + logger.info('Creating form deployment with values:', { + workflowId, + identifier, + title, + authType, + hasPassword: !!encryptedPassword, + emailCount: allowedEmails?.length || 0, + showBranding, + }) + + const mergedCustomizations = { + ...DEFAULT_FORM_CUSTOMIZATIONS, + ...(customizations || {}), + } + + await db.insert(form).values({ + id, + workflowId, + userId: session.user.id, + identifier, + title, + description: description || '', + customizations: mergedCustomizations, + isActive: true, + authType, + password: encryptedPassword, + allowedEmails: authType === 'email' ? allowedEmails : [], + showBranding, + createdAt: new Date(), + updatedAt: new Date(), + }) + + const baseDomain = getEmailDomain() + const protocol = isDev ? 
'http' : 'https' + const formUrl = `${protocol}://${baseDomain}/form/${identifier}` + + logger.info(`Form "${title}" deployed successfully at ${formUrl}`) + + return createSuccessResponse({ + id, + formUrl, + message: 'Form deployment created successfully', + }) + } catch (validationError) { + if (validationError instanceof z.ZodError) { + const errorMessage = validationError.errors[0]?.message || 'Invalid request data' + return createErrorResponse(errorMessage, 400, 'VALIDATION_ERROR') + } + throw validationError + } + } catch (error: any) { + logger.error('Error creating form deployment:', error) + return createErrorResponse(error.message || 'Failed to create form deployment', 500) + } +} diff --git a/apps/sim/app/api/form/utils.test.ts b/apps/sim/app/api/form/utils.test.ts new file mode 100644 index 0000000000..4c5a220eae --- /dev/null +++ b/apps/sim/app/api/form/utils.test.ts @@ -0,0 +1,367 @@ +import { databaseMock, loggerMock } from '@sim/testing' +import type { NextResponse } from 'next/server' +/** + * Tests for form API utils + * + * @vitest-environment node + */ +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' + +vi.mock('@sim/db', () => databaseMock) +vi.mock('@sim/logger', () => loggerMock) + +const mockDecryptSecret = vi.fn() + +vi.mock('@/lib/core/security/encryption', () => ({ + decryptSecret: mockDecryptSecret, +})) + +vi.mock('@/lib/core/config/feature-flags', () => ({ + isDev: true, + isHosted: false, + isProd: false, +})) + +vi.mock('@/lib/workspaces/permissions/utils', () => ({ + hasAdminPermission: vi.fn(), +})) + +describe('Form API Utils', () => { + afterEach(() => { + vi.clearAllMocks() + }) + + describe('Auth token utils', () => { + it.concurrent('should validate auth tokens', async () => { + const { validateAuthToken } = await import('@/lib/core/security/deployment') + + const formId = 'test-form-id' + const type = 'password' + + const token = Buffer.from(`${formId}:${type}:${Date.now()}`).toString('base64') + 
expect(typeof token).toBe('string') + expect(token.length).toBeGreaterThan(0) + + const isValid = validateAuthToken(token, formId) + expect(isValid).toBe(true) + + const isInvalidForm = validateAuthToken(token, 'wrong-form-id') + expect(isInvalidForm).toBe(false) + }) + + it.concurrent('should reject expired tokens', async () => { + const { validateAuthToken } = await import('@/lib/core/security/deployment') + + const formId = 'test-form-id' + const expiredToken = Buffer.from( + `${formId}:password:${Date.now() - 25 * 60 * 60 * 1000}` + ).toString('base64') + + const isValid = validateAuthToken(expiredToken, formId) + expect(isValid).toBe(false) + }) + + it.concurrent('should validate tokens with password hash', async () => { + const { validateAuthToken } = await import('@/lib/core/security/deployment') + const crypto = await import('crypto') + + const formId = 'test-form-id' + const encryptedPassword = 'encrypted-password-value' + const pwHash = crypto + .createHash('sha256') + .update(encryptedPassword) + .digest('hex') + .substring(0, 8) + + const token = Buffer.from(`${formId}:password:${Date.now()}:${pwHash}`).toString('base64') + + const isValid = validateAuthToken(token, formId, encryptedPassword) + expect(isValid).toBe(true) + + const isInvalidPassword = validateAuthToken(token, formId, 'different-password') + expect(isInvalidPassword).toBe(false) + }) + }) + + describe('Cookie handling', () => { + it('should set auth cookie correctly', async () => { + const { setFormAuthCookie } = await import('@/app/api/form/utils') + + const mockSet = vi.fn() + const mockResponse = { + cookies: { + set: mockSet, + }, + } as unknown as NextResponse + + const formId = 'test-form-id' + const type = 'password' + + setFormAuthCookie(mockResponse, formId, type) + + expect(mockSet).toHaveBeenCalledWith({ + name: `form_auth_${formId}`, + value: expect.any(String), + httpOnly: true, + secure: false, // Development mode + sameSite: 'lax', + path: '/', + maxAge: 60 * 60 * 24, + }) 
+ }) + }) + + describe('CORS handling', () => { + it.concurrent('should add CORS headers for any origin', async () => { + const { addCorsHeaders } = await import('@/lib/core/security/deployment') + + const mockRequest = { + headers: { + get: vi.fn().mockReturnValue('http://localhost:3000'), + }, + } as any + + const mockResponse = { + headers: { + set: vi.fn(), + }, + } as unknown as NextResponse + + addCorsHeaders(mockResponse, mockRequest) + + expect(mockResponse.headers.set).toHaveBeenCalledWith( + 'Access-Control-Allow-Origin', + 'http://localhost:3000' + ) + expect(mockResponse.headers.set).toHaveBeenCalledWith( + 'Access-Control-Allow-Credentials', + 'true' + ) + expect(mockResponse.headers.set).toHaveBeenCalledWith( + 'Access-Control-Allow-Methods', + 'GET, POST, OPTIONS' + ) + expect(mockResponse.headers.set).toHaveBeenCalledWith( + 'Access-Control-Allow-Headers', + 'Content-Type, X-Requested-With' + ) + }) + + it.concurrent('should not set CORS headers when no origin', async () => { + const { addCorsHeaders } = await import('@/lib/core/security/deployment') + + const mockRequest = { + headers: { + get: vi.fn().mockReturnValue(''), + }, + } as any + + const mockResponse = { + headers: { + set: vi.fn(), + }, + } as unknown as NextResponse + + addCorsHeaders(mockResponse, mockRequest) + + expect(mockResponse.headers.set).not.toHaveBeenCalled() + }) + }) + + describe('Form auth validation', () => { + beforeEach(async () => { + vi.clearAllMocks() + mockDecryptSecret.mockResolvedValue({ decrypted: 'correct-password' }) + }) + + it('should allow access to public forms', async () => { + const { validateFormAuth } = await import('@/app/api/form/utils') + + const deployment = { + id: 'form-id', + authType: 'public', + } + + const mockRequest = { + cookies: { + get: vi.fn().mockReturnValue(null), + }, + } as any + + const result = await validateFormAuth('request-id', deployment, mockRequest) + + expect(result.authorized).toBe(true) + }) + + it('should request 
password auth for GET requests', async () => { + const { validateFormAuth } = await import('@/app/api/form/utils') + + const deployment = { + id: 'form-id', + authType: 'password', + } + + const mockRequest = { + method: 'GET', + cookies: { + get: vi.fn().mockReturnValue(null), + }, + } as any + + const result = await validateFormAuth('request-id', deployment, mockRequest) + + expect(result.authorized).toBe(false) + expect(result.error).toBe('auth_required_password') + }) + + it('should validate password for POST requests', async () => { + const { validateFormAuth } = await import('@/app/api/form/utils') + const { decryptSecret } = await import('@/lib/core/security/encryption') + + const deployment = { + id: 'form-id', + authType: 'password', + password: 'encrypted-password', + } + + const mockRequest = { + method: 'POST', + cookies: { + get: vi.fn().mockReturnValue(null), + }, + } as any + + const parsedBody = { + password: 'correct-password', + } + + const result = await validateFormAuth('request-id', deployment, mockRequest, parsedBody) + + expect(decryptSecret).toHaveBeenCalledWith('encrypted-password') + expect(result.authorized).toBe(true) + }) + + it('should reject incorrect password', async () => { + const { validateFormAuth } = await import('@/app/api/form/utils') + + const deployment = { + id: 'form-id', + authType: 'password', + password: 'encrypted-password', + } + + const mockRequest = { + method: 'POST', + cookies: { + get: vi.fn().mockReturnValue(null), + }, + } as any + + const parsedBody = { + password: 'wrong-password', + } + + const result = await validateFormAuth('request-id', deployment, mockRequest, parsedBody) + + expect(result.authorized).toBe(false) + expect(result.error).toBe('Invalid password') + }) + + it('should request email auth for email-protected forms', async () => { + const { validateFormAuth } = await import('@/app/api/form/utils') + + const deployment = { + id: 'form-id', + authType: 'email', + allowedEmails: 
['user@example.com', '@company.com'], + } + + const mockRequest = { + method: 'GET', + cookies: { + get: vi.fn().mockReturnValue(null), + }, + } as any + + const result = await validateFormAuth('request-id', deployment, mockRequest) + + expect(result.authorized).toBe(false) + expect(result.error).toBe('auth_required_email') + }) + + it('should check allowed emails for email auth', async () => { + const { validateFormAuth } = await import('@/app/api/form/utils') + + const deployment = { + id: 'form-id', + authType: 'email', + allowedEmails: ['user@example.com', '@company.com'], + } + + const mockRequest = { + method: 'POST', + cookies: { + get: vi.fn().mockReturnValue(null), + }, + } as any + + // Exact email match should authorize + const result1 = await validateFormAuth('request-id', deployment, mockRequest, { + email: 'user@example.com', + }) + expect(result1.authorized).toBe(true) + + // Domain match should authorize + const result2 = await validateFormAuth('request-id', deployment, mockRequest, { + email: 'other@company.com', + }) + expect(result2.authorized).toBe(true) + + // Unknown email should not authorize + const result3 = await validateFormAuth('request-id', deployment, mockRequest, { + email: 'user@unknown.com', + }) + expect(result3.authorized).toBe(false) + expect(result3.error).toBe('Email not authorized for this form') + }) + + it('should require password when formData is present without password', async () => { + const { validateFormAuth } = await import('@/app/api/form/utils') + + const deployment = { + id: 'form-id', + authType: 'password', + password: 'encrypted-password', + } + + const mockRequest = { + method: 'POST', + cookies: { + get: vi.fn().mockReturnValue(null), + }, + } as any + + const parsedBody = { + formData: { field1: 'value1' }, + // No password provided + } + + const result = await validateFormAuth('request-id', deployment, mockRequest, parsedBody) + + expect(result.authorized).toBe(false) + 
expect(result.error).toBe('auth_required_password') + }) + }) + + describe('Default customizations', () => { + it.concurrent('should have correct default values', async () => { + const { DEFAULT_FORM_CUSTOMIZATIONS } = await import('@/app/api/form/utils') + + expect(DEFAULT_FORM_CUSTOMIZATIONS).toEqual({ + welcomeMessage: '', + thankYouTitle: 'Thank you!', + thankYouMessage: 'Your response has been submitted successfully.', + }) + }) + }) +}) diff --git a/apps/sim/app/api/form/utils.ts b/apps/sim/app/api/form/utils.ts new file mode 100644 index 0000000000..34255df600 --- /dev/null +++ b/apps/sim/app/api/form/utils.ts @@ -0,0 +1,204 @@ +import { db } from '@sim/db' +import { form, workflow } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { eq } from 'drizzle-orm' +import type { NextRequest, NextResponse } from 'next/server' +import { + isEmailAllowed, + setDeploymentAuthCookie, + validateAuthToken, +} from '@/lib/core/security/deployment' +import { decryptSecret } from '@/lib/core/security/encryption' +import { hasAdminPermission } from '@/lib/workspaces/permissions/utils' + +const logger = createLogger('FormAuthUtils') + +export function setFormAuthCookie( + response: NextResponse, + formId: string, + type: string, + encryptedPassword?: string | null +): void { + setDeploymentAuthCookie(response, 'form', formId, type, encryptedPassword) +} + +/** + * Check if user has permission to create a form for a specific workflow + * Either the user owns the workflow directly OR has admin permission for the workflow's workspace + */ +export async function checkWorkflowAccessForFormCreation( + workflowId: string, + userId: string +): Promise<{ hasAccess: boolean; workflow?: any }> { + const workflowData = await db.select().from(workflow).where(eq(workflow.id, workflowId)).limit(1) + + if (workflowData.length === 0) { + return { hasAccess: false } + } + + const workflowRecord = workflowData[0] + + if (workflowRecord.userId === userId) { + return { 
hasAccess: true, workflow: workflowRecord } + } + + if (workflowRecord.workspaceId) { + const hasAdmin = await hasAdminPermission(userId, workflowRecord.workspaceId) + if (hasAdmin) { + return { hasAccess: true, workflow: workflowRecord } + } + } + + return { hasAccess: false } +} + +/** + * Check if user has access to view/edit/delete a specific form + * Either the user owns the form directly OR has admin permission for the workflow's workspace + */ +export async function checkFormAccess( + formId: string, + userId: string +): Promise<{ hasAccess: boolean; form?: any }> { + const formData = await db + .select({ + form: form, + workflowWorkspaceId: workflow.workspaceId, + }) + .from(form) + .innerJoin(workflow, eq(form.workflowId, workflow.id)) + .where(eq(form.id, formId)) + .limit(1) + + if (formData.length === 0) { + return { hasAccess: false } + } + + const { form: formRecord, workflowWorkspaceId } = formData[0] + + if (formRecord.userId === userId) { + return { hasAccess: true, form: formRecord } + } + + if (workflowWorkspaceId) { + const hasAdmin = await hasAdminPermission(userId, workflowWorkspaceId) + if (hasAdmin) { + return { hasAccess: true, form: formRecord } + } + } + + return { hasAccess: false } +} + +export async function validateFormAuth( + requestId: string, + deployment: any, + request: NextRequest, + parsedBody?: any +): Promise<{ authorized: boolean; error?: string }> { + const authType = deployment.authType || 'public' + + if (authType === 'public') { + return { authorized: true } + } + + const cookieName = `form_auth_${deployment.id}` + const authCookie = request.cookies.get(cookieName) + + if (authCookie && validateAuthToken(authCookie.value, deployment.id, deployment.password)) { + return { authorized: true } + } + + if (authType === 'password') { + if (request.method === 'GET') { + return { authorized: false, error: 'auth_required_password' } + } + + try { + if (!parsedBody) { + return { authorized: false, error: 'Password is required' } + 
} + + const { password, formData } = parsedBody + + if (formData && !password) { + return { authorized: false, error: 'auth_required_password' } + } + + if (!password) { + return { authorized: false, error: 'Password is required' } + } + + if (!deployment.password) { + logger.error(`[${requestId}] No password set for password-protected form: ${deployment.id}`) + return { authorized: false, error: 'Authentication configuration error' } + } + + const { decrypted } = await decryptSecret(deployment.password) + if (password !== decrypted) { + return { authorized: false, error: 'Invalid password' } + } + + return { authorized: true } + } catch (error) { + logger.error(`[${requestId}] Error validating password:`, error) + return { authorized: false, error: 'Authentication error' } + } + } + + if (authType === 'email') { + if (request.method === 'GET') { + return { authorized: false, error: 'auth_required_email' } + } + + try { + if (!parsedBody) { + return { authorized: false, error: 'Email is required' } + } + + const { email, formData } = parsedBody + + if (formData && !email) { + return { authorized: false, error: 'auth_required_email' } + } + + if (!email) { + return { authorized: false, error: 'Email is required' } + } + + const allowedEmails: string[] = deployment.allowedEmails || [] + + if (isEmailAllowed(email, allowedEmails)) { + return { authorized: true } + } + + return { authorized: false, error: 'Email not authorized for this form' } + } catch (error) { + logger.error(`[${requestId}] Error validating email:`, error) + return { authorized: false, error: 'Authentication error' } + } + } + + return { authorized: false, error: 'Unsupported authentication type' } +} + +/** + * Form customizations interface + */ +export interface FormCustomizations { + primaryColor?: string + welcomeMessage?: string + thankYouTitle?: string + thankYouMessage?: string + logoUrl?: string +} + +/** + * Default form customizations + * Note: primaryColor is intentionally undefined to 
allow thank you screen to use its green default + */ +export const DEFAULT_FORM_CUSTOMIZATIONS: FormCustomizations = { + welcomeMessage: '', + thankYouTitle: 'Thank you!', + thankYouMessage: 'Your response has been submitted successfully.', +} diff --git a/apps/sim/app/api/form/validate/route.ts b/apps/sim/app/api/form/validate/route.ts new file mode 100644 index 0000000000..8352149fd9 --- /dev/null +++ b/apps/sim/app/api/form/validate/route.ts @@ -0,0 +1,71 @@ +import { db } from '@sim/db' +import { form } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { eq } from 'drizzle-orm' +import type { NextRequest } from 'next/server' +import { z } from 'zod' +import { getSession } from '@/lib/auth' +import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils' + +const logger = createLogger('FormValidateAPI') + +const validateQuerySchema = z.object({ + identifier: z + .string() + .min(1, 'Identifier is required') + .regex(/^[a-z0-9-]+$/, 'Identifier can only contain lowercase letters, numbers, and hyphens') + .max(100, 'Identifier must be 100 characters or less'), +}) + +/** + * GET endpoint to validate form identifier availability + */ +export async function GET(request: NextRequest) { + try { + const session = await getSession() + if (!session?.user?.id) { + return createErrorResponse('Unauthorized', 401) + } + const { searchParams } = new URL(request.url) + const identifier = searchParams.get('identifier') + + const validation = validateQuerySchema.safeParse({ identifier }) + + if (!validation.success) { + const errorMessage = validation.error.errors[0]?.message || 'Invalid identifier' + logger.warn(`Validation error: ${errorMessage}`) + + if (identifier && !/^[a-z0-9-]+$/.test(identifier)) { + return createSuccessResponse({ + available: false, + error: errorMessage, + }) + } + + return createErrorResponse(errorMessage, 400) + } + + const { identifier: validatedIdentifier } = validation.data + + const existingForm = 
await db + .select({ id: form.id }) + .from(form) + .where(eq(form.identifier, validatedIdentifier)) + .limit(1) + + const isAvailable = existingForm.length === 0 + + logger.debug( + `Identifier "${validatedIdentifier}" availability check: ${isAvailable ? 'available' : 'taken'}` + ) + + return createSuccessResponse({ + available: isAvailable, + error: isAvailable ? null : 'This identifier is already in use', + }) + } catch (error: unknown) { + const message = error instanceof Error ? error.message : 'Failed to validate identifier' + logger.error('Error validating form identifier:', error) + return createErrorResponse(message, 500) + } +} diff --git a/apps/sim/app/api/function/execute/route.test.ts b/apps/sim/app/api/function/execute/route.test.ts index 12bf26a7ab..783b89d1b2 100644 --- a/apps/sim/app/api/function/execute/route.test.ts +++ b/apps/sim/app/api/function/execute/route.test.ts @@ -3,6 +3,7 @@ * * @vitest-environment node */ +import { loggerMock } from '@sim/testing' import { NextRequest } from 'next/server' import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' import { createMockRequest } from '@/app/api/__test-utils__/utils' @@ -82,14 +83,7 @@ vi.mock('@/lib/execution/isolated-vm', () => ({ }), })) -vi.mock('@sim/logger', () => ({ - createLogger: vi.fn(() => ({ - info: vi.fn(), - error: vi.fn(), - warn: vi.fn(), - debug: vi.fn(), - })), -})) +vi.mock('@sim/logger', () => loggerMock) vi.mock('@/lib/execution/e2b', () => ({ executeInE2B: vi.fn(), diff --git a/apps/sim/app/api/knowledge/search/utils.test.ts b/apps/sim/app/api/knowledge/search/utils.test.ts index e5ebe22a8e..279f7e56e7 100644 --- a/apps/sim/app/api/knowledge/search/utils.test.ts +++ b/apps/sim/app/api/knowledge/search/utils.test.ts @@ -4,18 +4,15 @@ * * @vitest-environment node */ -import { createEnvMock } from '@sim/testing' +import { createEnvMock, createMockLogger } from '@sim/testing' import { beforeEach, describe, expect, it, vi } from 'vitest' 
-vi.mock('drizzle-orm') -vi.mock('@sim/logger', () => ({ - createLogger: vi.fn(() => ({ - info: vi.fn(), - debug: vi.fn(), - warn: vi.fn(), - error: vi.fn(), - })), +const loggerMock = vi.hoisted(() => ({ + createLogger: () => createMockLogger(), })) + +vi.mock('drizzle-orm') +vi.mock('@sim/logger', () => loggerMock) vi.mock('@sim/db') vi.mock('@/lib/knowledge/documents/utils', () => ({ retryWithExponentialBackoff: (fn: any) => fn(), diff --git a/apps/sim/app/api/proxy/tts/stream/route.ts b/apps/sim/app/api/proxy/tts/stream/route.ts index 35b045fc94..807c19d900 100644 --- a/apps/sim/app/api/proxy/tts/stream/route.ts +++ b/apps/sim/app/api/proxy/tts/stream/route.ts @@ -4,8 +4,8 @@ import { createLogger } from '@sim/logger' import { eq } from 'drizzle-orm' import type { NextRequest } from 'next/server' import { env } from '@/lib/core/config/env' +import { validateAuthToken } from '@/lib/core/security/deployment' import { validateAlphanumericId } from '@/lib/core/security/input-validation' -import { validateAuthToken } from '@/app/api/chat/utils' const logger = createLogger('ProxyTTSStreamAPI') diff --git a/apps/sim/app/api/schedules/[id]/route.test.ts b/apps/sim/app/api/schedules/[id]/route.test.ts index 0ab1195884..b7ce032a4b 100644 --- a/apps/sim/app/api/schedules/[id]/route.test.ts +++ b/apps/sim/app/api/schedules/[id]/route.test.ts @@ -3,6 +3,7 @@ * * @vitest-environment node */ +import { loggerMock } from '@sim/testing' import { NextRequest } from 'next/server' import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' @@ -43,14 +44,7 @@ vi.mock('@/lib/core/utils/request', () => ({ generateRequestId: () => 'test-request-id', })) -vi.mock('@sim/logger', () => ({ - createLogger: () => ({ - info: vi.fn(), - warn: vi.fn(), - error: vi.fn(), - debug: vi.fn(), - }), -})) +vi.mock('@sim/logger', () => loggerMock) import { PUT } from './route' diff --git a/apps/sim/app/api/schedules/route.test.ts b/apps/sim/app/api/schedules/route.test.ts index 
986e731138..608a1eb068 100644 --- a/apps/sim/app/api/schedules/route.test.ts +++ b/apps/sim/app/api/schedules/route.test.ts @@ -3,6 +3,7 @@ * * @vitest-environment node */ +import { loggerMock } from '@sim/testing' import { NextRequest } from 'next/server' import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' @@ -40,13 +41,7 @@ vi.mock('@/lib/core/utils/request', () => ({ generateRequestId: () => 'test-request-id', })) -vi.mock('@sim/logger', () => ({ - createLogger: () => ({ - info: vi.fn(), - warn: vi.fn(), - error: vi.fn(), - }), -})) +vi.mock('@sim/logger', () => loggerMock) import { GET } from '@/app/api/schedules/route' diff --git a/apps/sim/app/api/tools/custom/route.test.ts b/apps/sim/app/api/tools/custom/route.test.ts index 88f61ca129..da83f66153 100644 --- a/apps/sim/app/api/tools/custom/route.test.ts +++ b/apps/sim/app/api/tools/custom/route.test.ts @@ -1,14 +1,14 @@ -import { NextRequest } from 'next/server' /** * Tests for custom tools API routes * * @vitest-environment node */ +import { loggerMock } from '@sim/testing' +import { NextRequest } from 'next/server' import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' import { createMockRequest } from '@/app/api/__test-utils__/utils' describe('Custom Tools API Routes', () => { - // Sample data for testing const sampleTools = [ { id: 'tool-1', @@ -66,7 +66,6 @@ describe('Custom Tools API Routes', () => { }, ] - // Mock implementation stubs const mockSelect = vi.fn() const mockFrom = vi.fn() const mockWhere = vi.fn() @@ -82,13 +81,9 @@ describe('Custom Tools API Routes', () => { beforeEach(() => { vi.resetModules() - // Reset all mock implementations mockSelect.mockReturnValue({ from: mockFrom }) mockFrom.mockReturnValue({ where: mockWhere }) - // where() can be called with orderBy(), limit(), or directly awaited - // Create a mock query builder that supports all patterns mockWhere.mockImplementation((condition) => { - // Return an object that is both awaitable and has 
orderBy() and limit() methods const queryBuilder = { orderBy: mockOrderBy, limit: mockLimit, @@ -101,7 +96,6 @@ describe('Custom Tools API Routes', () => { return queryBuilder }) mockOrderBy.mockImplementation(() => { - // orderBy returns an awaitable query builder const queryBuilder = { limit: mockLimit, then: (resolve: (value: typeof sampleTools) => void) => { @@ -119,7 +113,6 @@ describe('Custom Tools API Routes', () => { mockSet.mockReturnValue({ where: mockWhere }) mockDelete.mockReturnValue({ where: mockWhere }) - // Mock database vi.doMock('@sim/db', () => ({ db: { select: mockSelect, @@ -127,14 +120,11 @@ describe('Custom Tools API Routes', () => { update: mockUpdate, delete: mockDelete, transaction: vi.fn().mockImplementation(async (callback) => { - // Execute the callback with a transaction object that has the same methods - // Create transaction-specific mocks that follow the same pattern const txMockSelect = vi.fn().mockReturnValue({ from: mockFrom }) const txMockInsert = vi.fn().mockReturnValue({ values: mockValues }) const txMockUpdate = vi.fn().mockReturnValue({ set: mockSet }) const txMockDelete = vi.fn().mockReturnValue({ where: mockWhere }) - // Transaction where() should also support the query builder pattern with orderBy const txMockOrderBy = vi.fn().mockImplementation(() => { const queryBuilder = { limit: mockLimit, @@ -160,7 +150,6 @@ describe('Custom Tools API Routes', () => { return queryBuilder }) - // Update mockFrom to return txMockWhere for transaction queries const txMockFrom = vi.fn().mockReturnValue({ where: txMockWhere }) txMockSelect.mockReturnValue({ from: txMockFrom }) @@ -174,7 +163,6 @@ describe('Custom Tools API Routes', () => { }, })) - // Mock schema vi.doMock('@sim/db/schema', () => ({ customTools: { id: 'id', @@ -189,12 +177,10 @@ describe('Custom Tools API Routes', () => { }, })) - // Mock authentication vi.doMock('@/lib/auth', () => ({ getSession: vi.fn().mockResolvedValue(mockSession), })) - // Mock hybrid auth 
vi.doMock('@/lib/auth/hybrid', () => ({ checkHybridAuth: vi.fn().mockResolvedValue({ success: true, @@ -203,22 +189,12 @@ describe('Custom Tools API Routes', () => { }), })) - // Mock permissions vi.doMock('@/lib/workspaces/permissions/utils', () => ({ getUserEntityPermissions: vi.fn().mockResolvedValue('admin'), })) - // Mock logger - vi.doMock('@sim/logger', () => ({ - createLogger: vi.fn().mockReturnValue({ - info: vi.fn(), - error: vi.fn(), - warn: vi.fn(), - debug: vi.fn(), - }), - })) + vi.doMock('@sim/logger', () => loggerMock) - // Mock drizzle-orm functions vi.doMock('drizzle-orm', async () => { const actual = await vi.importActual('drizzle-orm') return { @@ -232,12 +208,10 @@ describe('Custom Tools API Routes', () => { } }) - // Mock utils vi.doMock('@/lib/core/utils/request', () => ({ generateRequestId: vi.fn().mockReturnValue('test-request-id'), })) - // Mock custom tools operations vi.doMock('@/lib/workflows/custom-tools/operations', () => ({ upsertCustomTools: vi.fn().mockResolvedValue(sampleTools), })) @@ -252,29 +226,23 @@ describe('Custom Tools API Routes', () => { */ describe('GET /api/tools/custom', () => { it('should return tools for authenticated user with workspaceId', async () => { - // Create mock request with workspaceId const req = new NextRequest( 'http://localhost:3000/api/tools/custom?workspaceId=workspace-123' ) - // Simulate DB returning tools with orderBy chain mockWhere.mockReturnValueOnce({ orderBy: mockOrderBy.mockReturnValueOnce(Promise.resolve(sampleTools)), }) - // Import handler after mocks are set up const { GET } = await import('@/app/api/tools/custom/route') - // Call the handler const response = await GET(req) const data = await response.json() - // Verify response expect(response.status).toBe(200) expect(data).toHaveProperty('data') expect(data.data).toEqual(sampleTools) - // Verify DB query expect(mockSelect).toHaveBeenCalled() expect(mockFrom).toHaveBeenCalled() expect(mockWhere).toHaveBeenCalled() @@ -282,12 +250,10 @@ 
describe('Custom Tools API Routes', () => { }) it('should handle unauthorized access', async () => { - // Create mock request const req = new NextRequest( 'http://localhost:3000/api/tools/custom?workspaceId=workspace-123' ) - // Mock hybrid auth to return unauthorized vi.doMock('@/lib/auth/hybrid', () => ({ checkHybridAuth: vi.fn().mockResolvedValue({ success: false, @@ -295,26 +261,20 @@ describe('Custom Tools API Routes', () => { }), })) - // Import handler after mocks are set up const { GET } = await import('@/app/api/tools/custom/route') - // Call the handler const response = await GET(req) const data = await response.json() - // Verify response expect(response.status).toBe(401) expect(data).toHaveProperty('error', 'Unauthorized') }) it('should handle workflowId parameter', async () => { - // Create mock request with workflowId parameter const req = new NextRequest('http://localhost:3000/api/tools/custom?workflowId=workflow-123') - // Mock workflow lookup to return workspaceId (for limit(1) call) mockLimit.mockResolvedValueOnce([{ workspaceId: 'workspace-123' }]) - // Mock the where() call for fetching tools (returns awaitable query builder) mockWhere.mockImplementationOnce((condition) => { const queryBuilder = { limit: mockLimit, @@ -327,18 +287,14 @@ describe('Custom Tools API Routes', () => { return queryBuilder }) - // Import handler after mocks are set up const { GET } = await import('@/app/api/tools/custom/route') - // Call the handler const response = await GET(req) const data = await response.json() - // Verify response expect(response.status).toBe(200) expect(data).toHaveProperty('data') - // Verify DB query was called expect(mockWhere).toHaveBeenCalled() }) }) @@ -348,7 +304,6 @@ describe('Custom Tools API Routes', () => { */ describe('POST /api/tools/custom', () => { it('should reject unauthorized requests', async () => { - // Mock hybrid auth to return unauthorized vi.doMock('@/lib/auth/hybrid', () => ({ checkHybridAuth: vi.fn().mockResolvedValue({ 
success: false, @@ -356,39 +311,29 @@ describe('Custom Tools API Routes', () => { }), })) - // Create mock request const req = createMockRequest('POST', { tools: [], workspaceId: 'workspace-123' }) - // Import handler after mocks are set up const { POST } = await import('@/app/api/tools/custom/route') - // Call the handler const response = await POST(req) const data = await response.json() - // Verify response expect(response.status).toBe(401) expect(data).toHaveProperty('error', 'Unauthorized') }) it('should validate request data', async () => { - // Create invalid tool data (missing required fields) const invalidTool = { - // Missing title, schema code: 'return "invalid";', } - // Create mock request with invalid tool and workspaceId const req = createMockRequest('POST', { tools: [invalidTool], workspaceId: 'workspace-123' }) - // Import handler after mocks are set up const { POST } = await import('@/app/api/tools/custom/route') - // Call the handler const response = await POST(req) const data = await response.json() - // Verify response expect(response.status).toBe(400) expect(data).toHaveProperty('error', 'Invalid request data') expect(data).toHaveProperty('details') @@ -400,96 +345,74 @@ describe('Custom Tools API Routes', () => { */ describe('DELETE /api/tools/custom', () => { it('should delete a workspace-scoped tool by ID', async () => { - // Mock finding existing workspace-scoped tool mockLimit.mockResolvedValueOnce([sampleTools[0]]) - // Create mock request with ID and workspaceId parameters const req = new NextRequest( 'http://localhost:3000/api/tools/custom?id=tool-1&workspaceId=workspace-123' ) - // Import handler after mocks are set up const { DELETE } = await import('@/app/api/tools/custom/route') - // Call the handler const response = await DELETE(req) const data = await response.json() - // Verify response expect(response.status).toBe(200) expect(data).toHaveProperty('success', true) - // Verify delete was called with correct parameters 
expect(mockDelete).toHaveBeenCalled() expect(mockWhere).toHaveBeenCalled() }) it('should reject requests missing tool ID', async () => { - // Create mock request without ID parameter const req = createMockRequest('DELETE') - // Import handler after mocks are set up const { DELETE } = await import('@/app/api/tools/custom/route') - // Call the handler const response = await DELETE(req) const data = await response.json() - // Verify response expect(response.status).toBe(400) expect(data).toHaveProperty('error', 'Tool ID is required') }) it('should handle tool not found', async () => { - // Mock tool not found mockLimit.mockResolvedValueOnce([]) - // Create mock request with non-existent ID const req = new NextRequest('http://localhost:3000/api/tools/custom?id=non-existent') - // Import handler after mocks are set up const { DELETE } = await import('@/app/api/tools/custom/route') - // Call the handler const response = await DELETE(req) const data = await response.json() - // Verify response expect(response.status).toBe(404) expect(data).toHaveProperty('error', 'Tool not found') }) it('should prevent unauthorized deletion of user-scoped tool', async () => { - // Mock hybrid auth for the DELETE request vi.doMock('@/lib/auth/hybrid', () => ({ checkHybridAuth: vi.fn().mockResolvedValue({ success: true, - userId: 'user-456', // Different user + userId: 'user-456', authType: 'session', }), })) - // Mock finding user-scoped tool (no workspaceId) that belongs to user-123 const userScopedTool = { ...sampleTools[0], workspaceId: null, userId: 'user-123' } mockLimit.mockResolvedValueOnce([userScopedTool]) - // Create mock request (no workspaceId for user-scoped tool) const req = new NextRequest('http://localhost:3000/api/tools/custom?id=tool-1') - // Import handler after mocks are set up const { DELETE } = await import('@/app/api/tools/custom/route') - // Call the handler const response = await DELETE(req) const data = await response.json() - // Verify response 
expect(response.status).toBe(403) expect(data).toHaveProperty('error', 'Access denied') }) it('should reject unauthorized requests', async () => { - // Mock hybrid auth to return unauthorized vi.doMock('@/lib/auth/hybrid', () => ({ checkHybridAuth: vi.fn().mockResolvedValue({ success: false, @@ -497,17 +420,13 @@ describe('Custom Tools API Routes', () => { }), })) - // Create mock request const req = new NextRequest('http://localhost:3000/api/tools/custom?id=tool-1') - // Import handler after mocks are set up const { DELETE } = await import('@/app/api/tools/custom/route') - // Call the handler const response = await DELETE(req) const data = await response.json() - // Verify response expect(response.status).toBe(401) expect(data).toHaveProperty('error', 'Unauthorized') }) diff --git a/apps/sim/app/api/v1/admin/workflows/import/route.ts b/apps/sim/app/api/v1/admin/workflows/import/route.ts index db83f52d07..7c3dd58ad6 100644 --- a/apps/sim/app/api/v1/admin/workflows/import/route.ts +++ b/apps/sim/app/api/v1/admin/workflows/import/route.ts @@ -19,6 +19,7 @@ import { workflow, workspace } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { eq } from 'drizzle-orm' import { NextResponse } from 'next/server' +import { parseWorkflowJson } from '@/lib/workflows/operations/import-export' import { saveWorkflowToNormalizedTables } from '@/lib/workflows/persistence/utils' import { withAdminAuth } from '@/app/api/v1/admin/middleware' import { @@ -31,7 +32,6 @@ import { type WorkflowImportRequest, type WorkflowVariable, } from '@/app/api/v1/admin/types' -import { parseWorkflowJson } from '@/stores/workflows/json/importer' const logger = createLogger('AdminWorkflowImportAPI') diff --git a/apps/sim/app/api/v1/admin/workspaces/[id]/import/route.ts b/apps/sim/app/api/v1/admin/workspaces/[id]/import/route.ts index fa569b7f24..6bb6a4db66 100644 --- a/apps/sim/app/api/v1/admin/workspaces/[id]/import/route.ts +++ 
b/apps/sim/app/api/v1/admin/workspaces/[id]/import/route.ts @@ -31,6 +31,7 @@ import { NextResponse } from 'next/server' import { extractWorkflowName, extractWorkflowsFromZip, + parseWorkflowJson, } from '@/lib/workflows/operations/import-export' import { saveWorkflowToNormalizedTables } from '@/lib/workflows/persistence/utils' import { withAdminAuthParams } from '@/app/api/v1/admin/middleware' @@ -46,7 +47,6 @@ import { type WorkspaceImportRequest, type WorkspaceImportResponse, } from '@/app/api/v1/admin/types' -import { parseWorkflowJson } from '@/stores/workflows/json/importer' const logger = createLogger('AdminWorkspaceImportAPI') diff --git a/apps/sim/app/api/workflows/[id]/form/status/route.ts b/apps/sim/app/api/workflows/[id]/form/status/route.ts new file mode 100644 index 0000000000..a14abe736f --- /dev/null +++ b/apps/sim/app/api/workflows/[id]/form/status/route.ts @@ -0,0 +1,47 @@ +import { db } from '@sim/db' +import { form } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, eq } from 'drizzle-orm' +import type { NextRequest } from 'next/server' +import { getSession } from '@/lib/auth' +import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils' + +const logger = createLogger('FormStatusAPI') + +export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) { + try { + const session = await getSession() + + if (!session) { + return createErrorResponse('Unauthorized', 401) + } + + const { id: workflowId } = await params + + const formResult = await db + .select({ + id: form.id, + identifier: form.identifier, + title: form.title, + isActive: form.isActive, + }) + .from(form) + .where(and(eq(form.workflowId, workflowId), eq(form.isActive, true))) + .limit(1) + + if (formResult.length === 0) { + return createSuccessResponse({ + isDeployed: false, + form: null, + }) + } + + return createSuccessResponse({ + isDeployed: true, + form: formResult[0], + }) + } catch (error: 
any) { + logger.error('Error fetching form status:', error) + return createErrorResponse(error.message || 'Failed to fetch form status', 500) + } +} diff --git a/apps/sim/app/api/workflows/[id]/route.test.ts b/apps/sim/app/api/workflows/[id]/route.test.ts index 12ea444173..35f3d3473c 100644 --- a/apps/sim/app/api/workflows/[id]/route.test.ts +++ b/apps/sim/app/api/workflows/[id]/route.test.ts @@ -5,6 +5,7 @@ * @vitest-environment node */ +import { loggerMock } from '@sim/testing' import { NextRequest } from 'next/server' import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' @@ -20,14 +21,7 @@ vi.mock('@/lib/auth', () => ({ getSession: () => mockGetSession(), })) -vi.mock('@sim/logger', () => ({ - createLogger: vi.fn(() => ({ - debug: vi.fn(), - info: vi.fn(), - warn: vi.fn(), - error: vi.fn(), - })), -})) +vi.mock('@sim/logger', () => loggerMock) vi.mock('@/lib/workflows/persistence/utils', () => ({ loadWorkflowFromNormalizedTables: (workflowId: string) => diff --git a/apps/sim/app/chat/[identifier]/chat.tsx b/apps/sim/app/chat/[identifier]/chat.tsx index 0a43ea1849..926f96a064 100644 --- a/apps/sim/app/chat/[identifier]/chat.tsx +++ b/apps/sim/app/chat/[identifier]/chat.tsx @@ -460,43 +460,22 @@ export default function ChatClient({ identifier }: { identifier: string }) { ) if (error) { - return + return } if (authRequired) { - const title = new URLSearchParams(window.location.search).get('title') || 'chat' - const primaryColor = - new URLSearchParams(window.location.search).get('color') || 'var(--brand-primary-hover-hex)' + // const title = new URLSearchParams(window.location.search).get('title') || 'chat' + // const primaryColor = + // new URLSearchParams(window.location.search).get('color') || 'var(--brand-primary-hover-hex)' if (authRequired === 'password') { - return ( - - ) + return } if (authRequired === 'email') { - return ( - - ) + return } if (authRequired === 'sso') { - return ( - - ) + return } } diff --git 
a/apps/sim/app/chat/components/auth/email/email-auth.tsx b/apps/sim/app/chat/components/auth/email/email-auth.tsx index fb2a5d8036..d6ba3de532 100644 --- a/apps/sim/app/chat/components/auth/email/email-auth.tsx +++ b/apps/sim/app/chat/components/auth/email/email-auth.tsx @@ -2,14 +2,16 @@ import { type KeyboardEvent, useEffect, useState } from 'react' import { createLogger } from '@sim/logger' -import { Button } from '@/components/ui/button' -import { Input } from '@/components/ui/input' +import { Input } from '@/components/emcn' import { InputOTP, InputOTPGroup, InputOTPSlot } from '@/components/ui/input-otp' import { Label } from '@/components/ui/label' import { cn } from '@/lib/core/utils/cn' import { quickValidateEmail } from '@/lib/messaging/email/validation' import { inter } from '@/app/_styles/fonts/inter/inter' import { soehne } from '@/app/_styles/fonts/soehne/soehne' +import AuthBackground from '@/app/(auth)/components/auth-background' +import { BrandedButton } from '@/app/(auth)/components/branded-button' +import { SupportFooter } from '@/app/(auth)/components/support-footer' import Nav from '@/app/(landing)/components/nav/nav' const logger = createLogger('EmailAuth') @@ -17,8 +19,6 @@ const logger = createLogger('EmailAuth') interface EmailAuthProps { identifier: string onAuthSuccess: () => void - title?: string - primaryColor?: string } const validateEmailField = (emailValue: string): string[] => { @@ -37,57 +37,19 @@ const validateEmailField = (emailValue: string): string[] => { return errors } -export default function EmailAuth({ - identifier, - onAuthSuccess, - title = 'chat', - primaryColor = 'var(--brand-primary-hover-hex)', -}: EmailAuthProps) { - // Email auth state +export default function EmailAuth({ identifier, onAuthSuccess }: EmailAuthProps) { const [email, setEmail] = useState('') const [authError, setAuthError] = useState(null) const [isSendingOtp, setIsSendingOtp] = useState(false) const [isVerifyingOtp, setIsVerifyingOtp] = 
useState(false) const [emailErrors, setEmailErrors] = useState([]) const [showEmailValidationError, setShowEmailValidationError] = useState(false) - const [buttonClass, setButtonClass] = useState('auth-button-gradient') - // OTP verification state const [showOtpVerification, setShowOtpVerification] = useState(false) const [otpValue, setOtpValue] = useState('') const [countdown, setCountdown] = useState(0) const [isResendDisabled, setIsResendDisabled] = useState(false) - useEffect(() => { - // Check if CSS variable has been customized - const checkCustomBrand = () => { - const computedStyle = getComputedStyle(document.documentElement) - const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim() - - // Check if the CSS variable exists and is different from the default - if (brandAccent && brandAccent !== '#6f3dfa') { - setButtonClass('auth-button-custom') - } else { - setButtonClass('auth-button-gradient') - } - } - - checkCustomBrand() - - // Also check on window resize or theme changes - window.addEventListener('resize', checkCustomBrand) - const observer = new MutationObserver(checkCustomBrand) - observer.observe(document.documentElement, { - attributes: true, - attributeFilter: ['style', 'class'], - }) - - return () => { - window.removeEventListener('resize', checkCustomBrand) - observer.disconnect() - } - }, []) - useEffect(() => { if (countdown > 0) { const timer = setTimeout(() => setCountdown(countdown - 1), 1000) @@ -98,7 +60,6 @@ export default function EmailAuth({ } }, [countdown, isResendDisabled]) - // Handle email input key down const handleEmailKeyDown = (e: KeyboardEvent) => { if (e.key === 'Enter') { e.preventDefault() @@ -109,21 +70,16 @@ export default function EmailAuth({ const handleEmailChange = (e: React.ChangeEvent) => { const newEmail = e.target.value setEmail(newEmail) - - // Silently validate but don't show errors until submit const errors = validateEmailField(newEmail) setEmailErrors(errors) 
setShowEmailValidationError(false) } - // Handle sending OTP const handleSendOtp = async () => { - // Validate email on submit const emailValidationErrors = validateEmailField(email) setEmailErrors(emailValidationErrors) setShowEmailValidationError(emailValidationErrors.length > 0) - // If there are validation errors, stop submission if (emailValidationErrors.length > 0) { return } @@ -217,7 +173,6 @@ export default function EmailAuth({ return } - // Don't show success message in error state, just reset OTP setOtpValue('') } catch (error) { logger.error('Error resending OTP:', error) @@ -230,36 +185,34 @@ export default function EmailAuth({ } return ( -
    -