diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e3a1bdb3b66..3b67ff38f56 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -2,9 +2,9 @@ name: CI on: push: - branches: [main, staging] + branches: [main, staging, dev] pull_request: - branches: [main, staging] + branches: [main, staging, dev] concurrency: group: ci-${{ github.ref }} @@ -23,7 +23,7 @@ jobs: detect-version: name: Detect Version runs-on: blacksmith-4vcpu-ubuntu-2404 - if: github.event_name == 'push' && (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/staging') + if: github.event_name == 'push' && (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/staging' || github.ref == 'refs/heads/dev') outputs: version: ${{ steps.extract.outputs.version }} is_release: ${{ steps.extract.outputs.is_release }} @@ -49,7 +49,7 @@ jobs: build-amd64: name: Build AMD64 needs: [test-build, detect-version] - if: github.event_name == 'push' && (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/staging') + if: github.event_name == 'push' && (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/staging' || github.ref == 'refs/heads/dev') runs-on: blacksmith-8vcpu-ubuntu-2404 permissions: contents: read @@ -75,8 +75,8 @@ jobs: - name: Configure AWS credentials uses: aws-actions/configure-aws-credentials@v4 with: - role-to-assume: ${{ github.ref == 'refs/heads/main' && secrets.AWS_ROLE_TO_ASSUME || secrets.STAGING_AWS_ROLE_TO_ASSUME }} - aws-region: ${{ github.ref == 'refs/heads/main' && secrets.AWS_REGION || secrets.STAGING_AWS_REGION }} + role-to-assume: ${{ github.ref == 'refs/heads/main' && secrets.AWS_ROLE_TO_ASSUME || github.ref == 'refs/heads/dev' && secrets.DEV_AWS_ROLE_TO_ASSUME || secrets.STAGING_AWS_ROLE_TO_ASSUME }} + aws-region: ${{ github.ref == 'refs/heads/main' && secrets.AWS_REGION || github.ref == 'refs/heads/dev' && secrets.DEV_AWS_REGION || secrets.STAGING_AWS_REGION }} - name: Login to Amazon ECR id: login-ecr @@ -109,6 
+109,8 @@ jobs: # ECR tags (always build for ECR) if [ "${{ github.ref }}" = "refs/heads/main" ]; then ECR_TAG="latest" + elif [ "${{ github.ref }}" = "refs/heads/dev" ]; then + ECR_TAG="dev" else ECR_TAG="staging" fi diff --git a/.github/workflows/images.yml b/.github/workflows/images.yml index 44e8636d909..8028c433638 100644 --- a/.github/workflows/images.yml +++ b/.github/workflows/images.yml @@ -36,8 +36,8 @@ jobs: - name: Configure AWS credentials uses: aws-actions/configure-aws-credentials@v4 with: - role-to-assume: ${{ github.ref == 'refs/heads/main' && secrets.AWS_ROLE_TO_ASSUME || secrets.STAGING_AWS_ROLE_TO_ASSUME }} - aws-region: ${{ github.ref == 'refs/heads/main' && secrets.AWS_REGION || secrets.STAGING_AWS_REGION }} + role-to-assume: ${{ github.ref == 'refs/heads/main' && secrets.AWS_ROLE_TO_ASSUME || github.ref == 'refs/heads/dev' && secrets.DEV_AWS_ROLE_TO_ASSUME || secrets.STAGING_AWS_ROLE_TO_ASSUME }} + aws-region: ${{ github.ref == 'refs/heads/main' && secrets.AWS_REGION || github.ref == 'refs/heads/dev' && secrets.DEV_AWS_REGION || secrets.STAGING_AWS_REGION }} - name: Login to Amazon ECR id: login-ecr @@ -70,6 +70,8 @@ jobs: # ECR tags (always build for ECR) if [ "${{ github.ref }}" = "refs/heads/main" ]; then ECR_TAG="latest" + elif [ "${{ github.ref }}" = "refs/heads/dev" ]; then + ECR_TAG="dev" else ECR_TAG="staging" fi diff --git a/apps/docs/components/icons.tsx b/apps/docs/components/icons.tsx index 6f53db86f8b..44b5e1de985 100644 --- a/apps/docs/components/icons.tsx +++ b/apps/docs/components/icons.tsx @@ -683,6 +683,45 @@ export function SerperIcon(props: SVGProps) { ) } +export function TailscaleIcon(props: SVGProps) { + return ( + + + + + + + + + + ) +} + export function TavilyIcon(props: SVGProps) { return ( @@ -2041,6 +2080,19 @@ export function Mem0Icon(props: SVGProps) { ) } +export function ExtendIcon(props: SVGProps) { + return ( + + + + ) +} + export function EvernoteIcon(props: SVGProps) { return ( @@ -2152,6 +2204,17 @@ export 
function LangsmithIcon(props: SVGProps) { ) } +export function LaunchDarklyIcon(props: SVGProps) { + return ( + + + + ) +} + export function LemlistIcon(props: SVGProps) { return ( @@ -4502,6 +4565,24 @@ export function DynamoDBIcon(props: SVGProps) { ) } +export function SecretsManagerIcon(props: SVGProps) { + return ( + + + + + + + + + + + ) +} + export function SQSIcon(props: SVGProps) { return ( = { enrich: EnrichSoIcon, evernote: EvernoteIcon, exa: ExaAIIcon, + extend_v2: ExtendIcon, fathom: FathomIcon, file_v3: DocumentIcon, firecrawl: FirecrawlIcon, @@ -269,6 +274,7 @@ export const blockTypeToIconMap: Record = { ketch: KetchIcon, knowledge: PackageSearchIcon, langsmith: LangsmithIcon, + launchdarkly: LaunchDarklyIcon, lemlist: LemlistIcon, linear: LinearIcon, linkedin: LinkedInIcon, @@ -317,6 +323,7 @@ export const blockTypeToIconMap: Record = { s3: S3Icon, salesforce: SalesforceIcon, search: SearchIcon, + secrets_manager: SecretsManagerIcon, sendgrid: SendgridIcon, sentry: SentryIcon, serper: SerperIcon, @@ -333,6 +340,7 @@ export const blockTypeToIconMap: Record = { stripe: StripeIcon, stt_v2: STTIcon, supabase: SupabaseIcon, + tailscale: TailscaleIcon, tavily: TavilyIcon, telegram: TelegramIcon, textract_v2: TextractIcon, diff --git a/apps/docs/content/docs/en/tools/attio.mdx b/apps/docs/content/docs/en/tools/attio.mdx index 85ad63126b9..5a36922ee42 100644 --- a/apps/docs/content/docs/en/tools/attio.mdx +++ b/apps/docs/content/docs/en/tools/attio.mdx @@ -359,6 +359,35 @@ List tasks in Attio, optionally filtered by record, assignee, or completion stat | ↳ `createdAt` | string | When the task was created | | `count` | number | Number of tasks returned | +### `attio_get_task` + +Get a single task by ID from Attio + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `taskId` | string | Yes | The ID of the task to retrieve | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | 
----------- | +| `taskId` | string | The task ID | +| `content` | string | The task content | +| `deadlineAt` | string | The task deadline | +| `isCompleted` | boolean | Whether the task is completed | +| `linkedRecords` | array | Records linked to this task | +| ↳ `targetObjectId` | string | The linked object ID | +| ↳ `targetRecordId` | string | The linked record ID | +| `assignees` | array | Task assignees | +| ↳ `type` | string | The assignee actor type \(e.g. workspace-member\) | +| ↳ `id` | string | The assignee actor ID | +| `createdByActor` | object | The actor who created this task | +| ↳ `type` | string | The actor type \(e.g. workspace-member, api-token, system\) | +| ↳ `id` | string | The actor ID | +| `createdAt` | string | When the task was created | + ### `attio_create_task` Create a task in Attio @@ -1012,8 +1041,8 @@ Update a webhook in Attio (target URL and/or subscriptions) | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | | `webhookId` | string | Yes | The webhook ID to update | -| `targetUrl` | string | Yes | HTTPS target URL for webhook delivery | -| `subscriptions` | string | Yes | JSON array of subscriptions, e.g. \[\{"event_type":"note.created"\}\] | +| `targetUrl` | string | No | HTTPS target URL for webhook delivery | +| `subscriptions` | string | No | JSON array of subscriptions, e.g. \[\{"event_type":"note.created"\}\] | #### Output diff --git a/apps/docs/content/docs/en/tools/extend.mdx b/apps/docs/content/docs/en/tools/extend.mdx new file mode 100644 index 00000000000..8cecfca2783 --- /dev/null +++ b/apps/docs/content/docs/en/tools/extend.mdx @@ -0,0 +1,39 @@ +--- +title: Extend +description: Parse and extract content from documents +--- + +import { BlockInfoCard } from "@/components/ui/block-info-card" + + + +## Usage Instructions + +Integrate Extend AI into the workflow. Parse and extract structured content from documents or file references. 
+ + + +## Tools + +### `extend_parser` + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `filePath` | string | No | URL to a document to be processed | +| `file` | file | No | Document file to be processed | +| `fileUpload` | object | No | File upload data from file-upload component | +| `outputFormat` | string | No | Target output format \(markdown or spatial\). Defaults to markdown. | +| `chunking` | string | No | Chunking strategy \(page, document, or section\). Defaults to page. | +| `engine` | string | No | Parsing engine \(parse_performance or parse_light\). Defaults to parse_performance. | +| `apiKey` | string | Yes | Extend API key | + +#### Output + +This tool does not produce any outputs. + + diff --git a/apps/docs/content/docs/en/tools/launchdarkly.mdx b/apps/docs/content/docs/en/tools/launchdarkly.mdx new file mode 100644 index 00000000000..3cadbcb7f27 --- /dev/null +++ b/apps/docs/content/docs/en/tools/launchdarkly.mdx @@ -0,0 +1,388 @@ +--- +title: LaunchDarkly +description: Manage feature flags with LaunchDarkly. +--- + +import { BlockInfoCard } from "@/components/ui/block-info-card" + + + +{/* MANUAL-CONTENT-START:intro */} +[LaunchDarkly](https://launchdarkly.com/) is a feature management platform that enables teams to safely deploy, control, and measure their software features at scale. + +With the LaunchDarkly integration in Sim, you can: + +- **Feature flag management** — List, create, update, toggle, and delete feature flags programmatically. Toggle flags on or off in specific environments using LaunchDarkly's semantic patch API. +- **Flag status monitoring** — Check whether a flag is active, inactive, new, or launched in a given environment. Track the last time a flag was evaluated. +- **Project and environment management** — List all projects and their environments to understand your LaunchDarkly organization structure. 
+- **User segments** — List user segments within a project and environment to understand how your audience is organized for targeting. +- **Team visibility** — List account members and their roles for auditing and access management workflows. +- **Audit log** — Retrieve recent audit log entries to track who changed what, when. Filter entries by resource type for targeted monitoring. + +In Sim, the LaunchDarkly integration enables your agents to automate feature flag operations as part of their workflows. This allows for automation scenarios such as toggling flags on/off based on deployment pipeline events, monitoring flag status and alerting on stale or unused flags, auditing flag changes by querying the audit log after deployments, syncing flag metadata with your project management tools, and listing all feature flags across projects for governance. + +## Authentication + +This integration uses a LaunchDarkly API key. You can create personal access tokens or service tokens in the LaunchDarkly dashboard under **Account Settings > Authorization**. The API key is passed directly in the `Authorization` header (no `Bearer` prefix). + +## Need Help? + +If you encounter issues with the LaunchDarkly integration, contact us at [help@sim.ai](mailto:help@sim.ai) +{/* MANUAL-CONTENT-END */} + + +## Usage Instructions + +Integrate LaunchDarkly into your workflow. List, create, update, toggle, and delete feature flags. Manage projects, environments, segments, members, and audit logs. Requires API Key. + + + +## Tools + +### `launchdarkly_create_flag` + +Create a new feature flag in a LaunchDarkly project. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | LaunchDarkly API key | +| `projectKey` | string | Yes | The project key to create the flag in | +| `name` | string | Yes | Human-readable name for the feature flag | +| `key` | string | Yes | Unique key for the feature flag \(used in code\) | +| `description` | string | No | Description of the feature flag | +| `tags` | string | No | Comma-separated list of tags | +| `temporary` | boolean | No | Whether the flag is temporary \(default true\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The unique key of the feature flag | +| `name` | string | The human-readable name of the feature flag | +| `kind` | string | The type of flag \(boolean or multivariate\) | +| `description` | string | Description of the feature flag | +| `temporary` | boolean | Whether the flag is temporary | +| `archived` | boolean | Whether the flag is archived | +| `deprecated` | boolean | Whether the flag is deprecated | +| `creationDate` | number | Unix timestamp in milliseconds when the flag was created | +| `tags` | array | Tags applied to the flag | +| `variations` | array | The variations for this feature flag | +| ↳ `value` | string | The variation value | +| ↳ `name` | string | The variation name | +| ↳ `description` | string | The variation description | +| `maintainerId` | string | The ID of the member who maintains this flag | + +### `launchdarkly_delete_flag` + +Delete a feature flag from a LaunchDarkly project. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | LaunchDarkly API key | +| `projectKey` | string | Yes | The project key | +| `flagKey` | string | Yes | The feature flag key to delete | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `deleted` | boolean | Whether the flag was successfully deleted | + +### `launchdarkly_get_audit_log` + +List audit log entries from your LaunchDarkly account. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | LaunchDarkly API key | +| `limit` | number | No | Maximum number of entries to return \(default 10, max 20\) | +| `spec` | string | No | Filter expression \(e.g., "resourceType:flag"\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `entries` | array | List of audit log entries | +| ↳ `id` | string | The audit log entry ID | +| ↳ `date` | number | Unix timestamp in milliseconds | +| ↳ `kind` | string | The type of action performed | +| ↳ `name` | string | The name of the resource acted on | +| ↳ `description` | string | Full description of the action | +| ↳ `shortDescription` | string | Short description of the action | +| ↳ `memberEmail` | string | Email of the member who performed the action | +| ↳ `targetName` | string | Name of the target resource | +| ↳ `targetKind` | string | Kind of the target resource | +| `totalCount` | number | Total number of audit log entries | + +### `launchdarkly_get_flag` + +Get a single feature flag by key from a LaunchDarkly project. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | LaunchDarkly API key | +| `projectKey` | string | Yes | The project key | +| `flagKey` | string | Yes | The feature flag key | +| `environmentKey` | string | No | Filter flag configuration to a specific environment | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The unique key of the feature flag | +| `name` | string | The human-readable name of the feature flag | +| `kind` | string | The type of flag \(boolean or multivariate\) | +| `description` | string | Description of the feature flag | +| `temporary` | boolean | Whether the flag is temporary | +| `archived` | boolean | Whether the flag is archived | +| `deprecated` | boolean | Whether the flag is deprecated | +| `creationDate` | number | Unix timestamp in milliseconds when the flag was created | +| `tags` | array | Tags applied to the flag | +| `variations` | array | The variations for this feature flag | +| ↳ `value` | string | The variation value | +| ↳ `name` | string | The variation name | +| ↳ `description` | string | The variation description | +| `maintainerId` | string | The ID of the member who maintains this flag | +| `on` | boolean | Whether the flag is on in the requested environment \(null if no single environment was specified\) | + +### `launchdarkly_get_flag_status` + +Get the status of a feature flag across environments (active, inactive, launched, etc.). 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | LaunchDarkly API key | +| `projectKey` | string | Yes | The project key | +| `flagKey` | string | Yes | The feature flag key | +| `environmentKey` | string | Yes | The environment key | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `name` | string | The flag status \(new, active, inactive, launched\) | +| `lastRequested` | string | Timestamp of the last evaluation | +| `defaultVal` | string | The default variation value | + +### `launchdarkly_list_environments` + +List environments in a LaunchDarkly project. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | LaunchDarkly API key | +| `projectKey` | string | Yes | The project key to list environments for | +| `limit` | number | No | Maximum number of environments to return \(default 20\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `environments` | array | List of environments | +| ↳ `id` | string | The environment ID | +| ↳ `key` | string | The unique environment key | +| ↳ `name` | string | The environment name | +| ↳ `color` | string | The color assigned to this environment | +| ↳ `apiKey` | string | The server-side SDK key for this environment | +| ↳ `mobileKey` | string | The mobile SDK key for this environment | +| ↳ `tags` | array | Tags applied to the environment | +| `totalCount` | number | Total number of environments | + +### `launchdarkly_list_flags` + +List feature flags in a LaunchDarkly project. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | LaunchDarkly API key | +| `projectKey` | string | Yes | The project key to list flags for | +| `environmentKey` | string | No | Filter flag configurations to a specific environment | +| `tag` | string | No | Filter flags by tag name | +| `limit` | number | No | Maximum number of flags to return \(default 20\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `flags` | array | List of feature flags | +| ↳ `key` | string | The unique key of the feature flag | +| ↳ `name` | string | The human-readable name of the feature flag | +| ↳ `kind` | string | The type of flag \(boolean or multivariate\) | +| ↳ `description` | string | Description of the feature flag | +| ↳ `temporary` | boolean | Whether the flag is temporary | +| ↳ `archived` | boolean | Whether the flag is archived | +| ↳ `deprecated` | boolean | Whether the flag is deprecated | +| ↳ `creationDate` | number | Unix timestamp in milliseconds when the flag was created | +| ↳ `tags` | array | Tags applied to the flag | +| ↳ `variations` | array | The variations for this feature flag | +| ↳ `value` | string | The variation value | +| ↳ `name` | string | The variation name | +| ↳ `description` | string | The variation description | +| ↳ `maintainerId` | string | The ID of the member who maintains this flag | +| `totalCount` | number | Total number of flags | + +### `launchdarkly_list_members` + +List account members in your LaunchDarkly organization. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | LaunchDarkly API key | +| `limit` | number | No | Maximum number of members to return \(default 20\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `members` | array | List of account members | +| ↳ `id` | string | The member ID | +| ↳ `email` | string | The member email address | +| ↳ `firstName` | string | The member first name | +| ↳ `lastName` | string | The member last name | +| ↳ `role` | string | The member role \(reader, writer, admin, owner\) | +| ↳ `lastSeen` | number | Unix timestamp of last activity | +| ↳ `creationDate` | number | Unix timestamp when the member was created | +| ↳ `verified` | boolean | Whether the member email is verified | +| `totalCount` | number | Total number of members | + +### `launchdarkly_list_projects` + +List all projects in your LaunchDarkly account. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | LaunchDarkly API key | +| `limit` | number | No | Maximum number of projects to return \(default 20\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `projects` | array | List of projects | +| ↳ `id` | string | The project ID | +| ↳ `key` | string | The unique project key | +| ↳ `name` | string | The project name | +| ↳ `tags` | array | Tags applied to the project | +| `totalCount` | number | Total number of projects | + +### `launchdarkly_list_segments` + +List user segments in a LaunchDarkly project and environment. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | LaunchDarkly API key | +| `projectKey` | string | Yes | The project key | +| `environmentKey` | string | Yes | The environment key | +| `limit` | number | No | Maximum number of segments to return \(default 20\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `segments` | array | List of user segments | +| ↳ `key` | string | The unique segment key | +| ↳ `name` | string | The segment name | +| ↳ `description` | string | The segment description | +| ↳ `tags` | array | Tags applied to the segment | +| ↳ `creationDate` | number | Unix timestamp in milliseconds when the segment was created | +| ↳ `unbounded` | boolean | Whether this is an unbounded \(big\) segment | +| ↳ `included` | array | User keys explicitly included in the segment | +| ↳ `excluded` | array | User keys explicitly excluded from the segment | +| `totalCount` | number | Total number of segments | + +### `launchdarkly_toggle_flag` + +Toggle a feature flag on or off in a specific LaunchDarkly environment. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | LaunchDarkly API key | +| `projectKey` | string | Yes | The project key | +| `flagKey` | string | Yes | The feature flag key to toggle | +| `environmentKey` | string | Yes | The environment key to toggle the flag in | +| `enabled` | boolean | Yes | Whether to turn the flag on \(true\) or off \(false\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The unique key of the feature flag | +| `name` | string | The human-readable name of the feature flag | +| `kind` | string | The type of flag \(boolean or multivariate\) | +| `description` | string | Description of the feature flag | +| `temporary` | boolean | Whether the flag is temporary | +| `archived` | boolean | Whether the flag is archived | +| `deprecated` | boolean | Whether the flag is deprecated | +| `creationDate` | number | Unix timestamp in milliseconds when the flag was created | +| `tags` | array | Tags applied to the flag | +| `variations` | array | The variations for this feature flag | +| ↳ `value` | string | The variation value | +| ↳ `name` | string | The variation name | +| ↳ `description` | string | The variation description | +| `maintainerId` | string | The ID of the member who maintains this flag | +| `on` | boolean | Whether the flag is now on in the target environment | + +### `launchdarkly_update_flag` + +Update a feature flag metadata (name, description, tags, temporary, archived) using semantic patch. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | LaunchDarkly API key | +| `projectKey` | string | Yes | The project key | +| `flagKey` | string | Yes | The feature flag key to update | +| `updateName` | string | No | New name for the flag | +| `updateDescription` | string | No | New description for the flag | +| `addTags` | string | No | Comma-separated tags to add | +| `removeTags` | string | No | Comma-separated tags to remove | +| `archive` | boolean | No | Set to true to archive, false to restore | +| `comment` | string | No | Optional comment explaining the update | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The unique key of the feature flag | +| `name` | string | The human-readable name of the feature flag | +| `kind` | string | The type of flag \(boolean or multivariate\) | +| `description` | string | Description of the feature flag | +| `temporary` | boolean | Whether the flag is temporary | +| `archived` | boolean | Whether the flag is archived | +| `deprecated` | boolean | Whether the flag is deprecated | +| `creationDate` | number | Unix timestamp in milliseconds when the flag was created | +| `tags` | array | Tags applied to the flag | +| `variations` | array | The variations for this feature flag | +| ↳ `value` | string | The variation value | +| ↳ `name` | string | The variation name | +| ↳ `description` | string | The variation description | +| `maintainerId` | string | The ID of the member who maintains this flag | + + diff --git a/apps/docs/content/docs/en/tools/meta.json b/apps/docs/content/docs/en/tools/meta.json index 49ee064ffb1..20dd3a4bf9f 100644 --- a/apps/docs/content/docs/en/tools/meta.json +++ b/apps/docs/content/docs/en/tools/meta.json @@ -39,6 +39,7 @@ "enrich", "evernote", "exa", + "extend", "fathom", "file", "firecrawl", @@ -87,6 +88,7 @@ "ketch", "knowledge", "langsmith", + 
"launchdarkly", "lemlist", "linear", "linkedin", @@ -135,6 +137,7 @@ "s3", "salesforce", "search", + "secrets_manager", "sendgrid", "sentry", "serper", @@ -152,6 +155,7 @@ "stt", "supabase", "table", + "tailscale", "tavily", "telegram", "textract", diff --git a/apps/docs/content/docs/en/tools/secrets_manager.mdx b/apps/docs/content/docs/en/tools/secrets_manager.mdx new file mode 100644 index 00000000000..81fb8d21165 --- /dev/null +++ b/apps/docs/content/docs/en/tools/secrets_manager.mdx @@ -0,0 +1,157 @@ +--- +title: AWS Secrets Manager +description: Connect to AWS Secrets Manager +--- + +import { BlockInfoCard } from "@/components/ui/block-info-card" + + + +{/* MANUAL-CONTENT-START:intro */} +[AWS Secrets Manager](https://aws.amazon.com/secrets-manager/) is a secrets management service that helps you protect access to your applications, services, and IT resources. It enables you to rotate, manage, and retrieve database credentials, API keys, and other secrets throughout their lifecycle. + +With AWS Secrets Manager, you can: + +- **Securely store secrets**: Encrypt secrets at rest using AWS KMS encryption keys +- **Retrieve secrets programmatically**: Access secrets from your applications and workflows without hardcoding credentials +- **Rotate secrets automatically**: Configure automatic rotation for supported services like RDS, Redshift, and DocumentDB +- **Audit access**: Track secret access and changes through AWS CloudTrail integration +- **Control access with IAM**: Use fine-grained IAM policies to manage who can access which secrets +- **Replicate across regions**: Automatically replicate secrets to multiple AWS regions for disaster recovery + +In Sim, the AWS Secrets Manager integration allows your workflows to securely retrieve credentials and configuration values at runtime, create and manage secrets as part of automation pipelines, and maintain a centralized secrets store that your agents can access. 
This is particularly useful for workflows that need to authenticate with external services, rotate credentials, or manage sensitive configuration across environments — all without exposing secrets in your workflow definitions. +{/* MANUAL-CONTENT-END */} + + +## Usage Instructions + +Integrate AWS Secrets Manager into the workflow. Can retrieve, create, update, list, and delete secrets. + + + +## Tools + +### `secrets_manager_get_secret` + +Retrieve a secret value from AWS Secrets Manager + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `region` | string | Yes | AWS region \(e.g., us-east-1\) | +| `accessKeyId` | string | Yes | AWS access key ID | +| `secretAccessKey` | string | Yes | AWS secret access key | +| `secretId` | string | Yes | The name or ARN of the secret to retrieve | +| `versionId` | string | No | The unique identifier of the version to retrieve | +| `versionStage` | string | No | The staging label of the version to retrieve \(e.g., AWSCURRENT, AWSPREVIOUS\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `name` | string | Name of the secret | +| `secretValue` | string | The decrypted secret value | +| `arn` | string | ARN of the secret | +| `versionId` | string | Version ID of the secret | +| `versionStages` | array | Staging labels attached to this version | +| `createdDate` | string | Date the secret was created | + +### `secrets_manager_list_secrets` + +List secrets stored in AWS Secrets Manager + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `region` | string | Yes | AWS region \(e.g., us-east-1\) | +| `accessKeyId` | string | Yes | AWS access key ID | +| `secretAccessKey` | string | Yes | AWS secret access key | +| `maxResults` | number | No | Maximum number of secrets to return \(1-100, default 100\) | +| `nextToken` | string | No | Pagination token from a previous 
request | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `secrets` | json | List of secrets with name, ARN, description, and dates | +| `nextToken` | string | Pagination token for the next page of results | +| `count` | number | Number of secrets returned | + +### `secrets_manager_create_secret` + +Create a new secret in AWS Secrets Manager + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `region` | string | Yes | AWS region \(e.g., us-east-1\) | +| `accessKeyId` | string | Yes | AWS access key ID | +| `secretAccessKey` | string | Yes | AWS secret access key | +| `name` | string | Yes | Name of the secret to create | +| `secretValue` | string | Yes | The secret value \(plain text or JSON string\) | +| `description` | string | No | Description of the secret | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `message` | string | Operation status message | +| `name` | string | Name of the created secret | +| `arn` | string | ARN of the created secret | +| `versionId` | string | Version ID of the created secret | + +### `secrets_manager_update_secret` + +Update the value of an existing secret in AWS Secrets Manager + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `region` | string | Yes | AWS region \(e.g., us-east-1\) | +| `accessKeyId` | string | Yes | AWS access key ID | +| `secretAccessKey` | string | Yes | AWS secret access key | +| `secretId` | string | Yes | The name or ARN of the secret to update | +| `secretValue` | string | Yes | The new secret value \(plain text or JSON string\) | +| `description` | string | No | Updated description of the secret | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `message` | string | Operation status message | +| `name` | string | Name of the updated secret | +| `arn` | 
string | ARN of the updated secret | +| `versionId` | string | Version ID of the updated secret | + +### `secrets_manager_delete_secret` + +Delete a secret from AWS Secrets Manager + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `region` | string | Yes | AWS region \(e.g., us-east-1\) | +| `accessKeyId` | string | Yes | AWS access key ID | +| `secretAccessKey` | string | Yes | AWS secret access key | +| `secretId` | string | Yes | The name or ARN of the secret to delete | +| `recoveryWindowInDays` | number | No | Number of days before permanent deletion \(7-30, default 30\) | +| `forceDelete` | boolean | No | If true, immediately delete without recovery window | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `message` | string | Operation status message | +| `name` | string | Name of the deleted secret | +| `arn` | string | ARN of the deleted secret | +| `deletionDate` | string | Scheduled deletion date | + + diff --git a/apps/docs/content/docs/en/tools/tailscale.mdx b/apps/docs/content/docs/en/tools/tailscale.mdx new file mode 100644 index 00000000000..17d71352e3a --- /dev/null +++ b/apps/docs/content/docs/en/tools/tailscale.mdx @@ -0,0 +1,490 @@ +--- +title: Tailscale +description: Manage devices and network settings in your Tailscale tailnet +--- + +import { BlockInfoCard } from "@/components/ui/block-info-card" + + + +## Overview + +[Tailscale](https://tailscale.com) is a zero-config mesh VPN built on WireGuard that makes it easy to connect devices, services, and users across any network. The Tailscale block lets you automate network management tasks like device provisioning, access control, route management, and DNS configuration directly from your Sim workflows. + +## Authentication + +The Tailscale block uses API key authentication. To get an API key: + +1. Go to the [Tailscale admin console](https://login.tailscale.com/admin/settings/keys) +2. 
Navigate to **Settings > Keys** +3. Click **Generate API key** +4. Set an expiry (1-90 days) and copy the key (starts with `tskey-api-`) + +You must have an **Owner**, **Admin**, **IT admin**, or **Network admin** role to generate API keys. + +## Tailnet Identifier + +Every operation requires a **tailnet** parameter. This is typically your organization's domain name (e.g., `example.com`). You can also use `"-"` to refer to your default tailnet. + +## Common Use Cases + +- **Device inventory**: List and monitor all devices connected to your network +- **Automated provisioning**: Create and manage auth keys to pre-authorize new devices +- **Access control**: Authorize or deauthorize devices, manage device tags for ACL policies +- **Route management**: View and enable subnet routes for devices acting as subnet routers +- **DNS management**: Configure nameservers, MagicDNS, and search paths +- **Key lifecycle**: Create, list, inspect, and revoke auth keys +- **User auditing**: List all users in the tailnet and their roles +- **Policy review**: Retrieve the current ACL policy for inspection or backup + +## Tools + +### `tailscale_list_devices` + +List all devices in the tailnet + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Tailscale API key | +| `tailnet` | string | Yes | Tailnet name \(e.g., example.com\) or "-" for default | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `devices` | array | List of devices in the tailnet | +| ↳ `id` | string | Device ID | +| ↳ `name` | string | Device name | +| ↳ `hostname` | string | Device hostname | +| ↳ `user` | string | Associated user | +| ↳ `os` | string | Operating system | +| ↳ `clientVersion` | string | Tailscale client version | +| ↳ `addresses` | array | Tailscale IP addresses | +| ↳ `tags` | array | Device tags | +| ↳ `authorized` | boolean | Whether the device is authorized | +| ↳ 
`blocksIncomingConnections` | boolean | Whether the device blocks incoming connections | +| ↳ `lastSeen` | string | Last seen timestamp | +| ↳ `created` | string | Creation timestamp | +| `count` | number | Total number of devices | + +### `tailscale_get_device` + +Get details of a specific device by ID + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Tailscale API key | +| `tailnet` | string | Yes | Tailnet name \(e.g., example.com\) or "-" for default | +| `deviceId` | string | Yes | Device ID | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `id` | string | Device ID | +| `name` | string | Device name | +| `hostname` | string | Device hostname | +| `user` | string | Associated user | +| `os` | string | Operating system | +| `clientVersion` | string | Tailscale client version | +| `addresses` | array | Tailscale IP addresses | +| `tags` | array | Device tags | +| `authorized` | boolean | Whether the device is authorized | +| `blocksIncomingConnections` | boolean | Whether the device blocks incoming connections | +| `lastSeen` | string | Last seen timestamp | +| `created` | string | Creation timestamp | +| `enabledRoutes` | array | Approved subnet routes | +| `advertisedRoutes` | array | Requested subnet routes | +| `isExternal` | boolean | Whether the device is external | +| `updateAvailable` | boolean | Whether an update is available | +| `machineKey` | string | Machine key | +| `nodeKey` | string | Node key | + +### `tailscale_delete_device` + +Remove a device from the tailnet + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Tailscale API key | +| `tailnet` | string | Yes | Tailnet name \(e.g., example.com\) or "-" for default | +| `deviceId` | string | Yes | Device ID to delete | + +#### Output + +| Parameter | Type | Description | +| 
--------- | ---- | ----------- | +| `success` | boolean | Whether the device was successfully deleted | +| `deviceId` | string | ID of the deleted device | + +### `tailscale_authorize_device` + +Authorize or deauthorize a device on the tailnet + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Tailscale API key | +| `tailnet` | string | Yes | Tailnet name \(e.g., example.com\) or "-" for default | +| `deviceId` | string | Yes | Device ID to authorize | +| `authorized` | boolean | Yes | Whether to authorize \(true\) or deauthorize \(false\) the device | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `success` | boolean | Whether the operation succeeded | +| `deviceId` | string | Device ID | +| `authorized` | boolean | Authorization status after the operation | + +### `tailscale_set_device_tags` + +Set tags on a device in the tailnet + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Tailscale API key | +| `tailnet` | string | Yes | Tailnet name \(e.g., example.com\) or "-" for default | +| `deviceId` | string | Yes | Device ID | +| `tags` | string | Yes | Comma-separated list of tags \(e.g., "tag:server,tag:production"\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `success` | boolean | Whether the tags were successfully set | +| `deviceId` | string | Device ID | +| `tags` | array | Tags set on the device | + +### `tailscale_get_device_routes` + +Get the subnet routes for a device + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Tailscale API key | +| `tailnet` | string | Yes | Tailnet name \(e.g., example.com\) or "-" for default | +| `deviceId` | string | Yes | Device ID | + +#### Output + +| Parameter | Type | 
Description | +| --------- | ---- | ----------- | +| `advertisedRoutes` | array | Subnet routes the device is advertising | +| `enabledRoutes` | array | Subnet routes that are approved/enabled | + +### `tailscale_set_device_routes` + +Set the enabled subnet routes for a device + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Tailscale API key | +| `tailnet` | string | Yes | Tailnet name \(e.g., example.com\) or "-" for default | +| `deviceId` | string | Yes | Device ID | +| `routes` | string | Yes | Comma-separated list of subnet routes to enable \(e.g., "10.0.0.0/24,192.168.1.0/24"\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `advertisedRoutes` | array | Subnet routes the device is advertising | +| `enabledRoutes` | array | Subnet routes that are now enabled | + +### `tailscale_update_device_key` + +Enable or disable key expiry on a device + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Tailscale API key | +| `tailnet` | string | Yes | Tailnet name \(e.g., example.com\) or "-" for default | +| `deviceId` | string | Yes | Device ID | +| `keyExpiryDisabled` | boolean | Yes | Whether to disable key expiry \(true\) or enable it \(false\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `success` | boolean | Whether the operation succeeded | +| `deviceId` | string | Device ID | +| `keyExpiryDisabled` | boolean | Whether key expiry is now disabled | + +### `tailscale_list_dns_nameservers` + +Get the DNS nameservers configured for the tailnet + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Tailscale API key | +| `tailnet` | string | Yes | Tailnet name \(e.g., example.com\) or "-" for default | + +#### Output + 
+| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `dns` | array | List of DNS nameserver addresses | +| `magicDNS` | boolean | Whether MagicDNS is enabled | + +### `tailscale_set_dns_nameservers` + +Set the DNS nameservers for the tailnet + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Tailscale API key | +| `tailnet` | string | Yes | Tailnet name \(e.g., example.com\) or "-" for default | +| `dns` | string | Yes | Comma-separated list of DNS nameserver IP addresses \(e.g., "8.8.8.8,8.8.4.4"\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `dns` | array | Updated list of DNS nameserver addresses | + +### `tailscale_get_dns_preferences` + +Get the DNS preferences for the tailnet including MagicDNS status + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Tailscale API key | +| `tailnet` | string | Yes | Tailnet name \(e.g., example.com\) or "-" for default | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `magicDNS` | boolean | Whether MagicDNS is enabled | + +### `tailscale_set_dns_preferences` + +Set DNS preferences for the tailnet (enable/disable MagicDNS) + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Tailscale API key | +| `tailnet` | string | Yes | Tailnet name \(e.g., example.com\) or "-" for default | +| `magicDNS` | boolean | Yes | Whether to enable \(true\) or disable \(false\) MagicDNS | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `magicDNS` | boolean | Updated MagicDNS status | + +### `tailscale_get_dns_searchpaths` + +Get the DNS search paths configured for the tailnet + +#### Input + +| Parameter | Type | Required | 
Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Tailscale API key | +| `tailnet` | string | Yes | Tailnet name \(e.g., example.com\) or "-" for default | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `searchPaths` | array | List of DNS search path domains | + +### `tailscale_set_dns_searchpaths` + +Set the DNS search paths for the tailnet + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Tailscale API key | +| `tailnet` | string | Yes | Tailnet name \(e.g., example.com\) or "-" for default | +| `searchPaths` | string | Yes | Comma-separated list of DNS search path domains \(e.g., "corp.example.com,internal.example.com"\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `searchPaths` | array | Updated list of DNS search path domains | + +### `tailscale_list_users` + +List all users in the tailnet + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Tailscale API key | +| `tailnet` | string | Yes | Tailnet name \(e.g., example.com\) or "-" for default | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `users` | array | List of users in the tailnet | +| ↳ `id` | string | User ID | +| ↳ `displayName` | string | Display name | +| ↳ `loginName` | string | Login name / email | +| ↳ `profilePicURL` | string | Profile picture URL | +| ↳ `role` | string | User role \(owner, admin, member, etc.\) | +| ↳ `status` | string | User status \(active, suspended, etc.\) | +| ↳ `type` | string | User type \(member, shared, tagged\) | +| ↳ `created` | string | Creation timestamp | +| ↳ `lastSeen` | string | Last seen timestamp | +| ↳ `deviceCount` | number | Number of devices owned by user | +| `count` | number | Total number of users | + 
+### `tailscale_create_auth_key` + +Create a new auth key for the tailnet to pre-authorize devices + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Tailscale API key | +| `tailnet` | string | Yes | Tailnet name \(e.g., example.com\) or "-" for default | +| `reusable` | boolean | No | Whether the key can be used more than once | +| `ephemeral` | boolean | No | Whether devices authenticated with this key are ephemeral | +| `preauthorized` | boolean | No | Whether devices are pre-authorized \(skip manual approval\) | +| `tags` | string | Yes | Comma-separated list of tags for devices using this key \(e.g., "tag:server,tag:prod"\) | +| `description` | string | No | Description for the auth key | +| `expirySeconds` | number | No | Key expiry time in seconds \(default: 90 days\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `id` | string | Auth key ID | +| `key` | string | The auth key value \(only shown once at creation\) | +| `description` | string | Key description | +| `created` | string | Creation timestamp | +| `expires` | string | Expiration timestamp | +| `revoked` | string | Revocation timestamp \(empty if not revoked\) | +| `capabilities` | object | Key capabilities | +| ↳ `reusable` | boolean | Whether the key is reusable | +| ↳ `ephemeral` | boolean | Whether devices are ephemeral | +| ↳ `preauthorized` | boolean | Whether devices are pre-authorized | +| ↳ `tags` | array | Tags applied to devices using this key | + +### `tailscale_list_auth_keys` + +List all auth keys in the tailnet + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Tailscale API key | +| `tailnet` | string | Yes | Tailnet name \(e.g., example.com\) or "-" for default | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `keys` | 
array | List of auth keys | +| ↳ `id` | string | Auth key ID | +| ↳ `description` | string | Key description | +| ↳ `created` | string | Creation timestamp | +| ↳ `expires` | string | Expiration timestamp | +| ↳ `revoked` | string | Revocation timestamp | +| ↳ `capabilities` | object | Key capabilities | +| ↳ `reusable` | boolean | Whether the key is reusable | +| ↳ `ephemeral` | boolean | Whether devices are ephemeral | +| ↳ `preauthorized` | boolean | Whether devices are pre-authorized | +| ↳ `tags` | array | Tags applied to devices | +| `count` | number | Total number of auth keys | + +### `tailscale_get_auth_key` + +Get details of a specific auth key + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Tailscale API key | +| `tailnet` | string | Yes | Tailnet name \(e.g., example.com\) or "-" for default | +| `keyId` | string | Yes | Auth key ID | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `id` | string | Auth key ID | +| `description` | string | Key description | +| `created` | string | Creation timestamp | +| `expires` | string | Expiration timestamp | +| `revoked` | string | Revocation timestamp | +| `capabilities` | object | Key capabilities | +| ↳ `reusable` | boolean | Whether the key is reusable | +| ↳ `ephemeral` | boolean | Whether devices are ephemeral | +| ↳ `preauthorized` | boolean | Whether devices are pre-authorized | +| ↳ `tags` | array | Tags applied to devices using this key | + +### `tailscale_delete_auth_key` + +Revoke and delete an auth key + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Tailscale API key | +| `tailnet` | string | Yes | Tailnet name \(e.g., example.com\) or "-" for default | +| `keyId` | string | Yes | Auth key ID to delete | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | 
----------- | +| `success` | boolean | Whether the auth key was successfully deleted | +| `keyId` | string | ID of the deleted auth key | + +### `tailscale_get_acl` + +Get the current ACL policy for the tailnet + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | Tailscale API key | +| `tailnet` | string | Yes | Tailnet name \(e.g., example.com\) or "-" for default | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `acl` | string | ACL policy as JSON string | +| `etag` | string | ETag for the current ACL version \(use with If-Match header for updates\) | + + diff --git a/apps/sim/.env.example b/apps/sim/.env.example index 6c22b09eef4..8db5d82c1af 100644 --- a/apps/sim/.env.example +++ b/apps/sim/.env.example @@ -28,6 +28,7 @@ API_ENCRYPTION_KEY=your_api_encryption_key # Use `openssl rand -hex 32` to gener # OLLAMA_URL=http://localhost:11434 # URL for local Ollama server - uncomment if using local models # VLLM_BASE_URL=http://localhost:8000 # Base URL for your self-hosted vLLM (OpenAI-compatible) # VLLM_API_KEY= # Optional bearer token if your vLLM instance requires auth +# FIREWORKS_API_KEY= # Optional Fireworks AI API key for model listing # Admin API (Optional - for self-hosted GitOps) # ADMIN_API_KEY= # Use `openssl rand -hex 32` to generate. Enables admin API for workflow export/import. 
diff --git a/apps/sim/app/(landing)/integrations/data/icon-mapping.ts b/apps/sim/app/(landing)/integrations/data/icon-mapping.ts index 841cda375b3..7b9b36c977a 100644 --- a/apps/sim/app/(landing)/integrations/data/icon-mapping.ts +++ b/apps/sim/app/(landing)/integrations/data/icon-mapping.ts @@ -45,6 +45,7 @@ import { EnrichSoIcon, EvernoteIcon, ExaAIIcon, + ExtendIcon, EyeIcon, FathomIcon, FirecrawlIcon, @@ -91,6 +92,7 @@ import { KalshiIcon, KetchIcon, LangsmithIcon, + LaunchDarklyIcon, LemlistIcon, LinearIcon, LinkedInIcon, @@ -140,6 +142,7 @@ import { S3Icon, SalesforceIcon, SearchIcon, + SecretsManagerIcon, SendgridIcon, SentryIcon, SerperIcon, @@ -155,6 +158,7 @@ import { StagehandIcon, StripeIcon, SupabaseIcon, + TailscaleIcon, TavilyIcon, TelegramIcon, TextractIcon, @@ -221,6 +225,7 @@ export const blockTypeToIconMap: Record = { enrich: EnrichSoIcon, evernote: EvernoteIcon, exa: ExaAIIcon, + extend_v2: ExtendIcon, fathom: FathomIcon, file_v3: DocumentIcon, firecrawl: FirecrawlIcon, @@ -269,6 +274,7 @@ export const blockTypeToIconMap: Record = { ketch: KetchIcon, knowledge: PackageSearchIcon, langsmith: LangsmithIcon, + launchdarkly: LaunchDarklyIcon, lemlist: LemlistIcon, linear: LinearIcon, linkedin: LinkedInIcon, @@ -317,6 +323,7 @@ export const blockTypeToIconMap: Record = { s3: S3Icon, salesforce: SalesforceIcon, search: SearchIcon, + secrets_manager: SecretsManagerIcon, sendgrid: SendgridIcon, sentry: SentryIcon, serper: SerperIcon, @@ -333,6 +340,7 @@ export const blockTypeToIconMap: Record = { stripe: StripeIcon, stt_v2: STTIcon, supabase: SupabaseIcon, + tailscale: TailscaleIcon, tavily: TavilyIcon, telegram: TelegramIcon, textract_v2: TextractIcon, diff --git a/apps/sim/app/(landing)/integrations/data/integrations.json b/apps/sim/app/(landing)/integrations/data/integrations.json index 789e5ef4a3e..f36c23fbb4d 100644 --- a/apps/sim/app/(landing)/integrations/data/integrations.json +++ b/apps/sim/app/(landing)/integrations/data/integrations.json @@ 
-926,6 +926,10 @@ "name": "List Tasks", "description": "List tasks in Attio, optionally filtered by record, assignee, or completion status" }, + { + "name": "Get Task", + "description": "Get a single task by ID from Attio" + }, { "name": "Create Task", "description": "Create a task in Attio" @@ -1039,7 +1043,7 @@ "description": "Delete a webhook from Attio" } ], - "operationCount": 40, + "operationCount": 41, "triggers": [ { "id": "attio_record_created", @@ -1126,18 +1130,77 @@ "name": "Attio List Entry Deleted", "description": "Trigger workflow when a list entry is deleted in Attio" }, + { + "id": "attio_list_created", + "name": "Attio List Created", + "description": "Trigger workflow when a list is created in Attio" + }, + { + "id": "attio_list_updated", + "name": "Attio List Updated", + "description": "Trigger workflow when a list is updated in Attio" + }, + { + "id": "attio_list_deleted", + "name": "Attio List Deleted", + "description": "Trigger workflow when a list is deleted in Attio" + }, + { + "id": "attio_workspace_member_created", + "name": "Attio Workspace Member Created", + "description": "Trigger workflow when a new member is added to the Attio workspace" + }, { "id": "attio_webhook", "name": "Attio Webhook (All Events)", "description": "Trigger workflow on any Attio webhook event" } ], - "triggerCount": 18, + "triggerCount": 22, "authType": "oauth", "category": "tools", "integrationType": "crm", "tags": ["sales-engagement", "enrichment"] }, + { + "type": "secrets_manager", + "slug": "aws-secrets-manager", + "name": "AWS Secrets Manager", + "description": "Connect to AWS Secrets Manager", + "longDescription": "Integrate AWS Secrets Manager into the workflow. 
Can retrieve, create, update, list, and delete secrets.", + "bgColor": "linear-gradient(45deg, #BD0816 0%, #FF5252 100%)", + "iconName": "SecretsManagerIcon", + "docsUrl": "https://docs.sim.ai/tools/secrets-manager", + "operations": [ + { + "name": "Get Secret", + "description": "Retrieve a secret value from AWS Secrets Manager" + }, + { + "name": "List Secrets", + "description": "List secrets stored in AWS Secrets Manager" + }, + { + "name": "Create Secret", + "description": "Create a new secret in AWS Secrets Manager" + }, + { + "name": "Update Secret", + "description": "Update the value of an existing secret in AWS Secrets Manager" + }, + { + "name": "Delete Secret", + "description": "Delete a secret from AWS Secrets Manager" + } + ], + "operationCount": 5, + "triggers": [], + "triggerCount": 0, + "authType": "none", + "category": "tools", + "integrationType": "developer-tools", + "tags": ["cloud", "secrets-management"] + }, { "type": "textract_v2", "slug": "aws-textract", @@ -2939,6 +3002,24 @@ "integrationType": "search", "tags": ["web-scraping", "enrichment"] }, + { + "type": "extend_v2", + "slug": "extend", + "name": "Extend", + "description": "Parse and extract content from documents", + "longDescription": "Integrate Extend AI into the workflow. 
Parse and extract structured content from documents or file references.", + "bgColor": "#000000", + "iconName": "ExtendIcon", + "docsUrl": "https://docs.sim.ai/tools/extend", + "operations": [], + "operationCount": 0, + "triggers": [], + "triggerCount": 0, + "authType": "api-key", + "category": "tools", + "integrationType": "ai", + "tags": ["document-processing", "ocr"] + }, { "type": "fathom", "slug": "fathom", @@ -6300,6 +6381,73 @@ "integrationType": "developer-tools", "tags": ["monitoring", "llm", "data-analytics"] }, + { + "type": "launchdarkly", + "slug": "launchdarkly", + "name": "LaunchDarkly", + "description": "Manage feature flags with LaunchDarkly.", + "longDescription": "Integrate LaunchDarkly into your workflow. List, create, update, toggle, and delete feature flags. Manage projects, environments, segments, members, and audit logs. Requires API Key.", + "bgColor": "#191919", + "iconName": "LaunchDarklyIcon", + "docsUrl": "https://docs.sim.ai/tools/launchdarkly", + "operations": [ + { + "name": "List Flags", + "description": "List feature flags in a LaunchDarkly project." + }, + { + "name": "Get Flag", + "description": "Get a single feature flag by key from a LaunchDarkly project." + }, + { + "name": "Create Flag", + "description": "Create a new feature flag in a LaunchDarkly project." + }, + { + "name": "Update Flag", + "description": "Update a feature flag metadata (name, description, tags, temporary, archived) using semantic patch." + }, + { + "name": "Toggle Flag", + "description": "Toggle a feature flag on or off in a specific LaunchDarkly environment." + }, + { + "name": "Delete Flag", + "description": "Delete a feature flag from a LaunchDarkly project." + }, + { + "name": "Get Flag Status", + "description": "Get the status of a feature flag across environments (active, inactive, launched, etc.)." + }, + { + "name": "List Projects", + "description": "List all projects in your LaunchDarkly account." 
+ }, + { + "name": "List Environments", + "description": "List environments in a LaunchDarkly project." + }, + { + "name": "List Segments", + "description": "List user segments in a LaunchDarkly project and environment." + }, + { + "name": "List Members", + "description": "List account members in your LaunchDarkly organization." + }, + { + "name": "Get Audit Log", + "description": "List audit log entries from your LaunchDarkly account." + } + ], + "operationCount": 12, + "triggers": [], + "triggerCount": 0, + "authType": "api-key", + "category": "tools", + "integrationType": "developer-tools", + "tags": ["feature-flags", "ci-cd"] + }, { "type": "lemlist", "slug": "lemlist", @@ -10482,6 +10630,105 @@ "integrationType": "databases", "tags": ["cloud", "data-warehouse", "vector-search"] }, + { + "type": "tailscale", + "slug": "tailscale", + "name": "Tailscale", + "description": "Manage devices and network settings in your Tailscale tailnet", + "longDescription": "Interact with the Tailscale API to manage devices, DNS, ACLs, auth keys, users, and routes across your tailnet.", + "bgColor": "#2E2D2D", + "iconName": "TailscaleIcon", + "docsUrl": "https://docs.sim.ai/tools/tailscale", + "operations": [ + { + "name": "List Devices", + "description": "List all devices in the tailnet" + }, + { + "name": "Get Device", + "description": "Get details of a specific device by ID" + }, + { + "name": "Delete Device", + "description": "Remove a device from the tailnet" + }, + { + "name": "Authorize Device", + "description": "Authorize or deauthorize a device on the tailnet" + }, + { + "name": "Set Device Tags", + "description": "Set tags on a device in the tailnet" + }, + { + "name": "Get Device Routes", + "description": "Get the subnet routes for a device" + }, + { + "name": "Set Device Routes", + "description": "Set the enabled subnet routes for a device" + }, + { + "name": "Update Device Key", + "description": "Enable or disable key expiry on a device" + }, + { + "name": "List DNS 
Nameservers", + "description": "Get the DNS nameservers configured for the tailnet" + }, + { + "name": "Set DNS Nameservers", + "description": "Set the DNS nameservers for the tailnet" + }, + { + "name": "Get DNS Preferences", + "description": "Get the DNS preferences for the tailnet including MagicDNS status" + }, + { + "name": "Set DNS Preferences", + "description": "Set DNS preferences for the tailnet (enable/disable MagicDNS)" + }, + { + "name": "Get DNS Search Paths", + "description": "Get the DNS search paths configured for the tailnet" + }, + { + "name": "Set DNS Search Paths", + "description": "Set the DNS search paths for the tailnet" + }, + { + "name": "List Users", + "description": "List all users in the tailnet" + }, + { + "name": "Create Auth Key", + "description": "Create a new auth key for the tailnet to pre-authorize devices" + }, + { + "name": "List Auth Keys", + "description": "List all auth keys in the tailnet" + }, + { + "name": "Get Auth Key", + "description": "Get details of a specific auth key" + }, + { + "name": "Delete Auth Key", + "description": "Revoke and delete an auth key" + }, + { + "name": "Get ACL", + "description": "Get the current ACL policy for the tailnet" + } + ], + "operationCount": 20, + "triggers": [], + "triggerCount": 0, + "authType": "api-key", + "category": "tools", + "integrationType": "security", + "tags": ["monitoring"] + }, { "type": "tavily", "slug": "tavily", diff --git a/apps/sim/app/academy/components/sandbox-canvas-provider.tsx b/apps/sim/app/academy/components/sandbox-canvas-provider.tsx index 62edc38bc76..3bd682f43ef 100644 --- a/apps/sim/app/academy/components/sandbox-canvas-provider.tsx +++ b/apps/sim/app/academy/components/sandbox-canvas-provider.tsx @@ -13,10 +13,12 @@ import type { import { validateExercise } from '@/lib/academy/validation' import { cn } from '@/lib/core/utils/cn' import { getEffectiveBlockOutputs } from '@/lib/workflows/blocks/block-outputs' +import { getQueryClient } from 
'@/app/_shell/providers/get-query-client' import { GlobalCommandsProvider } from '@/app/workspace/[workspaceId]/providers/global-commands-provider' import { SandboxWorkspacePermissionsProvider } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider' import Workflow from '@/app/workspace/[workspaceId]/w/[workflowId]/workflow' import { getBlock } from '@/blocks/registry' +import { workflowKeys } from '@/hooks/queries/workflows' import { SandboxBlockConstraintsContext } from '@/hooks/use-sandbox-block-constraints' import { useExecutionStore } from '@/stores/execution/store' import { useTerminalConsoleStore } from '@/stores/terminal/console/store' @@ -218,8 +220,13 @@ export function SandboxCanvasProvider({ useWorkflowStore.getState().replaceWorkflowState(workflowState) useSubBlockStore.getState().initializeFromWorkflow(workflowId, workflowState.blocks) - useWorkflowRegistry.setState((state) => ({ - workflows: { ...state.workflows, [workflowId]: syntheticMetadata }, + + const qc = getQueryClient() + const cacheKey = workflowKeys.list(SANDBOX_WORKSPACE_ID, 'active') + const cached = qc.getQueryData(cacheKey) ?? [] + qc.setQueryData(cacheKey, [...cached.filter((w) => w.id !== workflowId), syntheticMetadata]) + + useWorkflowRegistry.setState({ activeWorkflowId: workflowId, hydration: { phase: 'ready', @@ -228,7 +235,7 @@ export function SandboxCanvasProvider({ requestId: null, error: null, }, - })) + }) logger.info('Sandbox stores hydrated', { workflowId }) setIsReady(true) @@ -262,17 +269,21 @@ export function SandboxCanvasProvider({ unsubWorkflow() unsubSubBlock() unsubExecution() - useWorkflowRegistry.setState((state) => { - const { [workflowId]: _removed, ...rest } = state.workflows - return { - workflows: rest, - activeWorkflowId: state.activeWorkflowId === workflowId ? null : state.activeWorkflowId, - hydration: - state.hydration.workflowId === workflowId - ? 
{ phase: 'idle', workspaceId: null, workflowId: null, requestId: null, error: null } - : state.hydration, - } - }) + const cleanupQc = getQueryClient() + const cleanupKey = workflowKeys.list(SANDBOX_WORKSPACE_ID, 'active') + const cleanupCached = cleanupQc.getQueryData(cleanupKey) ?? [] + cleanupQc.setQueryData( + cleanupKey, + cleanupCached.filter((w) => w.id !== workflowId) + ) + + useWorkflowRegistry.setState((state) => ({ + activeWorkflowId: state.activeWorkflowId === workflowId ? null : state.activeWorkflowId, + hydration: + state.hydration.workflowId === workflowId + ? { phase: 'idle', workspaceId: null, workflowId: null, requestId: null, error: null } + : state.hydration, + })) useWorkflowStore.setState({ blocks: {}, edges: [], loops: {}, parallels: {} }) useSubBlockStore.setState((state) => { const { [workflowId]: _removed, ...rest } = state.workflowValues diff --git a/apps/sim/app/api/knowledge/[id]/documents/route.test.ts b/apps/sim/app/api/knowledge/[id]/documents/route.test.ts index 70eacdf46e8..2be0e79bc52 100644 --- a/apps/sim/app/api/knowledge/[id]/documents/route.test.ts +++ b/apps/sim/app/api/knowledge/[id]/documents/route.test.ts @@ -457,11 +457,8 @@ describe('Knowledge Base Documents API Route', () => { }, ], processingOptions: { - chunkSize: 1024, - minCharactersPerChunk: 100, recipe: 'default', lang: 'en', - chunkOverlap: 200, }, } @@ -533,11 +530,8 @@ describe('Knowledge Base Documents API Route', () => { }, ], processingOptions: { - chunkSize: 50, // Invalid: too small - minCharactersPerChunk: 0, // Invalid: too small recipe: 'default', lang: 'en', - chunkOverlap: 1000, // Invalid: too large }, } diff --git a/apps/sim/app/api/knowledge/[id]/documents/route.ts b/apps/sim/app/api/knowledge/[id]/documents/route.ts index 18f7af35ac2..c65507d81f7 100644 --- a/apps/sim/app/api/knowledge/[id]/documents/route.ts +++ b/apps/sim/app/api/knowledge/[id]/documents/route.ts @@ -38,26 +38,14 @@ const CreateDocumentSchema = z.object({ documentTagsData: 
z.string().optional(), }) -/** - * Schema for bulk document creation with processing options - * - * Processing options units: - * - chunkSize: tokens (1 token ≈ 4 characters) - * - minCharactersPerChunk: characters - * - chunkOverlap: characters - */ const BulkCreateDocumentsSchema = z.object({ documents: z.array(CreateDocumentSchema), - processingOptions: z.object({ - /** Maximum chunk size in tokens (1 token ≈ 4 characters) */ - chunkSize: z.number().min(100).max(4000), - /** Minimum chunk size in characters */ - minCharactersPerChunk: z.number().min(1).max(2000), - recipe: z.string(), - lang: z.string(), - /** Overlap between chunks in characters */ - chunkOverlap: z.number().min(0).max(500), - }), + processingOptions: z + .object({ + recipe: z.string().optional(), + lang: z.string().optional(), + }) + .optional(), bulk: z.literal(true), }) @@ -246,8 +234,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: knowledgeBaseId, documentsCount: createdDocuments.length, uploadType: 'bulk', - chunkSize: validatedData.processingOptions.chunkSize, - recipe: validatedData.processingOptions.recipe, + recipe: validatedData.processingOptions?.recipe, }) } catch (_e) { // Silently fail @@ -256,7 +243,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: processDocumentsWithQueue( createdDocuments, knowledgeBaseId, - validatedData.processingOptions, + validatedData.processingOptions ?? 
{}, requestId ).catch((error: unknown) => { logger.error(`[${requestId}] Critical error in document processing pipeline:`, error) diff --git a/apps/sim/app/api/knowledge/[id]/documents/upsert/route.ts b/apps/sim/app/api/knowledge/[id]/documents/upsert/route.ts index 2499006ed35..1b44c7a81fe 100644 --- a/apps/sim/app/api/knowledge/[id]/documents/upsert/route.ts +++ b/apps/sim/app/api/knowledge/[id]/documents/upsert/route.ts @@ -25,13 +25,12 @@ const UpsertDocumentSchema = z.object({ fileSize: z.number().min(1, 'File size must be greater than 0'), mimeType: z.string().min(1, 'MIME type is required'), documentTagsData: z.string().optional(), - processingOptions: z.object({ - chunkSize: z.number().min(100).max(4000), - minCharactersPerChunk: z.number().min(1).max(2000), - recipe: z.string(), - lang: z.string(), - chunkOverlap: z.number().min(0).max(500), - }), + processingOptions: z + .object({ + recipe: z.string().optional(), + lang: z.string().optional(), + }) + .optional(), workflowId: z.string().optional(), }) @@ -166,7 +165,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: processDocumentsWithQueue( createdDocuments, knowledgeBaseId, - validatedData.processingOptions, + validatedData.processingOptions ?? 
{}, requestId ).catch((error: unknown) => { logger.error(`[${requestId}] Critical error in document processing pipeline:`, error) @@ -178,8 +177,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: knowledgeBaseId, documentsCount: 1, uploadType: 'single', - chunkSize: validatedData.processingOptions.chunkSize, - recipe: validatedData.processingOptions.recipe, + recipe: validatedData.processingOptions?.recipe, }) } catch (_e) { // Silently fail diff --git a/apps/sim/app/api/mcp/servers/[id]/refresh/route.ts b/apps/sim/app/api/mcp/servers/[id]/refresh/route.ts index 7f6f2adb20e..b6b186ec4ac 100644 --- a/apps/sim/app/api/mcp/servers/[id]/refresh/route.ts +++ b/apps/sim/app/api/mcp/servers/[id]/refresh/route.ts @@ -38,15 +38,23 @@ interface SyncResult { updatedWorkflowIds: string[] } +interface ServerMetadata { + url?: string + name?: string +} + /** - * Syncs tool schemas from discovered MCP tools to all workflow blocks using those tools. - * Returns the count and IDs of updated workflows. + * Syncs tool schemas and server metadata from discovered MCP tools to all + * workflow blocks using those tools. Updates stored serverUrl/serverName + * when the server's details have changed, preventing stale badges after + * a server URL edit. 
*/ async function syncToolSchemasToWorkflows( workspaceId: string, serverId: string, tools: McpTool[], - requestId: string + requestId: string, + serverMeta?: ServerMetadata ): Promise { const toolsByName = new Map(tools.map((t) => [t.name, t])) @@ -94,7 +102,10 @@ async function syncToolSchemasToWorkflows( const schemasMatch = JSON.stringify(tool.schema) === JSON.stringify(newSchema) - if (!schemasMatch) { + const urlChanged = serverMeta?.url != null && tool.params.serverUrl !== serverMeta.url + const nameChanged = serverMeta?.name != null && tool.params.serverName !== serverMeta.name + + if (!schemasMatch || urlChanged || nameChanged) { hasUpdates = true const validParamKeys = new Set(Object.keys(newSchema.properties || {})) @@ -106,6 +117,9 @@ async function syncToolSchemasToWorkflows( } } + if (urlChanged) cleanedParams.serverUrl = serverMeta.url + if (nameChanged) cleanedParams.serverName = serverMeta.name + return { ...tool, schema: newSchema, params: cleanedParams } } @@ -188,7 +202,8 @@ export const POST = withMcpAuth<{ id: string }>('read')( workspaceId, serverId, discoveredTools, - requestId + requestId, + { url: server.url ?? undefined, name: server.name ?? 
undefined } ) } catch (error) { connectionStatus = 'error' diff --git a/apps/sim/app/api/providers/fireworks/models/route.ts b/apps/sim/app/api/providers/fireworks/models/route.ts new file mode 100644 index 00000000000..070d860efcf --- /dev/null +++ b/apps/sim/app/api/providers/fireworks/models/route.ts @@ -0,0 +1,93 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { getBYOKKey } from '@/lib/api-key/byok' +import { getSession } from '@/lib/auth' +import { env } from '@/lib/core/config/env' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' +import { filterBlacklistedModels, isProviderBlacklisted } from '@/providers/utils' + +const logger = createLogger('FireworksModelsAPI') + +interface FireworksModel { + id: string + object?: string + created?: number + owned_by?: string +} + +interface FireworksModelsResponse { + data: FireworksModel[] + object?: string +} + +export async function GET(request: NextRequest) { + if (isProviderBlacklisted('fireworks')) { + logger.info('Fireworks provider is blacklisted, returning empty models') + return NextResponse.json({ models: [] }) + } + + let apiKey: string | undefined + + const workspaceId = request.nextUrl.searchParams.get('workspaceId') + if (workspaceId) { + const session = await getSession() + if (session?.user?.id) { + const permission = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId) + if (permission) { + const byokResult = await getBYOKKey(workspaceId, 'fireworks') + if (byokResult) { + apiKey = byokResult.apiKey + } + } + } + } + + if (!apiKey) { + apiKey = env.FIREWORKS_API_KEY + } + + if (!apiKey) { + logger.info('No Fireworks API key available, returning empty models') + return NextResponse.json({ models: [] }) + } + + try { + const response = await fetch('https://api.fireworks.ai/inference/v1/models', { + headers: { + Authorization: `Bearer ${apiKey}`, + 'Content-Type': 'application/json', + }, 
+ cache: 'no-store', + }) + + if (!response.ok) { + logger.warn('Failed to fetch Fireworks models', { + status: response.status, + statusText: response.statusText, + }) + return NextResponse.json({ models: [] }) + } + + const data = (await response.json()) as FireworksModelsResponse + + const allModels: string[] = [] + for (const model of data.data ?? []) { + allModels.push(`fireworks/${model.id}`) + } + + const uniqueModels = Array.from(new Set(allModels)) + const models = filterBlacklistedModels(uniqueModels) + + logger.info('Successfully fetched Fireworks models', { + count: models.length, + filtered: uniqueModels.length - models.length, + }) + + return NextResponse.json({ models }) + } catch (error) { + logger.error('Error fetching Fireworks models', { + error: error instanceof Error ? error.message : 'Unknown error', + }) + return NextResponse.json({ models: [] }) + } +} diff --git a/apps/sim/app/api/schedules/execute/route.ts b/apps/sim/app/api/schedules/execute/route.ts index d739f3aa67b..2a4d4f89872 100644 --- a/apps/sim/app/api/schedules/execute/route.ts +++ b/apps/sim/app/api/schedules/execute/route.ts @@ -146,7 +146,11 @@ export async function GET(request: NextRequest) { }) } else { jobId = await jobQueue.enqueue('schedule-execution', payload, { - metadata: { workflowId: schedule.workflowId ?? undefined, correlation }, + metadata: { + workflowId: schedule.workflowId ?? undefined, + workspaceId: resolvedWorkspaceId ?? 
undefined, + correlation, + }, }) } logger.info( diff --git a/apps/sim/app/api/tools/extend/parse/route.ts b/apps/sim/app/api/tools/extend/parse/route.ts new file mode 100644 index 00000000000..3f604c48109 --- /dev/null +++ b/apps/sim/app/api/tools/extend/parse/route.ts @@ -0,0 +1,188 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkInternalAuth } from '@/lib/auth/hybrid' +import { + secureFetchWithPinnedIP, + validateUrlWithDNS, +} from '@/lib/core/security/input-validation.server' +import { generateRequestId } from '@/lib/core/utils/request' +import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas' +import { isInternalFileUrl } from '@/lib/uploads/utils/file-utils' +import { resolveFileInputToUrl } from '@/lib/uploads/utils/file-utils.server' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('ExtendParseAPI') + +const ExtendParseSchema = z.object({ + apiKey: z.string().min(1, 'API key is required'), + filePath: z.string().optional(), + file: RawFileInputSchema.optional(), + outputFormat: z.enum(['markdown', 'spatial']).optional(), + chunking: z.enum(['page', 'document', 'section']).optional(), + engine: z.enum(['parse_performance', 'parse_light']).optional(), +}) + +export async function POST(request: NextRequest) { + const requestId = generateRequestId() + + try { + const authResult = await checkInternalAuth(request, { requireWorkflowId: false }) + + if (!authResult.success || !authResult.userId) { + logger.warn(`[${requestId}] Unauthorized Extend parse attempt`, { + error: authResult.error || 'Missing userId', + }) + return NextResponse.json( + { + success: false, + error: authResult.error || 'Unauthorized', + }, + { status: 401 } + ) + } + + const userId = authResult.userId + const body = await request.json() + const validatedData = ExtendParseSchema.parse(body) + + logger.info(`[${requestId}] Extend parse request`, { + 
fileName: validatedData.file?.name, + filePath: validatedData.filePath, + isWorkspaceFile: validatedData.filePath ? isInternalFileUrl(validatedData.filePath) : false, + userId, + }) + + const resolution = await resolveFileInputToUrl({ + file: validatedData.file, + filePath: validatedData.filePath, + userId, + requestId, + logger, + }) + + if (resolution.error) { + return NextResponse.json( + { success: false, error: resolution.error.message }, + { status: resolution.error.status } + ) + } + + const fileUrl = resolution.fileUrl + if (!fileUrl) { + return NextResponse.json({ success: false, error: 'File input is required' }, { status: 400 }) + } + + const extendBody: Record = { + file: { fileUrl }, + } + + const config: Record = {} + + if (validatedData.outputFormat) { + config.target = validatedData.outputFormat + } + + if (validatedData.chunking) { + config.chunkingStrategy = { type: validatedData.chunking } + } + + if (validatedData.engine) { + config.engine = validatedData.engine + } + + if (Object.keys(config).length > 0) { + extendBody.config = config + } + + const extendEndpoint = 'https://api.extend.ai/parse' + const extendValidation = await validateUrlWithDNS(extendEndpoint, 'Extend API URL') + if (!extendValidation.isValid) { + logger.error(`[${requestId}] Extend API URL validation failed`, { + error: extendValidation.error, + }) + return NextResponse.json( + { + success: false, + error: 'Failed to reach Extend API', + }, + { status: 502 } + ) + } + + const extendResponse = await secureFetchWithPinnedIP( + extendEndpoint, + extendValidation.resolvedIP!, + { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Accept: 'application/json', + Authorization: `Bearer ${validatedData.apiKey}`, + 'x-extend-api-version': '2025-04-21', + }, + body: JSON.stringify(extendBody), + } + ) + + if (!extendResponse.ok) { + const errorText = await extendResponse.text() + logger.error(`[${requestId}] Extend API error:`, errorText) + let clientError = `Extend 
API error: ${extendResponse.statusText || extendResponse.status}` + try { + const parsedError = JSON.parse(errorText) + if (parsedError?.message || parsedError?.error) { + clientError = (parsedError.message ?? parsedError.error) as string + } + } catch { + // errorText is not JSON; keep generic message + } + return NextResponse.json( + { + success: false, + error: clientError, + }, + { status: extendResponse.status } + ) + } + + const extendData = (await extendResponse.json()) as Record + + logger.info(`[${requestId}] Extend parse successful`) + + return NextResponse.json({ + success: true, + output: { + id: extendData.id ?? null, + status: extendData.status ?? 'PROCESSED', + chunks: extendData.chunks ?? [], + blocks: extendData.blocks ?? [], + pageCount: extendData.pageCount ?? extendData.page_count ?? null, + creditsUsed: extendData.creditsUsed ?? extendData.credits_used ?? null, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors }) + return NextResponse.json( + { + success: false, + error: 'Invalid request data', + details: error.errors, + }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error in Extend parse:`, error) + + return NextResponse.json( + { + success: false, + error: error instanceof Error ? 
error.message : 'Internal server error', + }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/tools/secrets_manager/create-secret/route.ts b/apps/sim/app/api/tools/secrets_manager/create-secret/route.ts new file mode 100644 index 00000000000..baa3f39de9b --- /dev/null +++ b/apps/sim/app/api/tools/secrets_manager/create-secret/route.ts @@ -0,0 +1,65 @@ +import { randomUUID } from 'crypto' +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkInternalAuth } from '@/lib/auth/hybrid' +import { createSecret, createSecretsManagerClient } from '../utils' + +const logger = createLogger('SecretsManagerCreateSecretAPI') + +const CreateSecretSchema = z.object({ + region: z.string().min(1, 'AWS region is required'), + accessKeyId: z.string().min(1, 'AWS access key ID is required'), + secretAccessKey: z.string().min(1, 'AWS secret access key is required'), + name: z.string().min(1, 'Secret name is required'), + secretValue: z.string().min(1, 'Secret value is required'), + description: z.string().nullish(), +}) + +export async function POST(request: NextRequest) { + const requestId = randomUUID().slice(0, 8) + + const auth = await checkInternalAuth(request) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 }) + } + + try { + const body = await request.json() + const params = CreateSecretSchema.parse(body) + + logger.info(`[${requestId}] Creating secret ${params.name}`) + + const client = createSecretsManagerClient({ + region: params.region, + accessKeyId: params.accessKeyId, + secretAccessKey: params.secretAccessKey, + }) + + try { + const result = await createSecret(client, params.name, params.secretValue, params.description) + + logger.info(`[${requestId}] Secret created: ${result.name}`) + + return NextResponse.json({ + message: `Secret "${result.name}" created successfully`, + ...result, + }) + } finally 
{ + client.destroy() + } + } catch (error) { + if (error instanceof z.ZodError) { + logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors }) + return NextResponse.json( + { error: 'Invalid request data', details: error.errors }, + { status: 400 } + ) + } + + const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred' + logger.error(`[${requestId}] Failed to create secret:`, error) + + return NextResponse.json({ error: `Failed to create secret: ${errorMessage}` }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/tools/secrets_manager/delete-secret/route.ts b/apps/sim/app/api/tools/secrets_manager/delete-secret/route.ts new file mode 100644 index 00000000000..87b76f2391c --- /dev/null +++ b/apps/sim/app/api/tools/secrets_manager/delete-secret/route.ts @@ -0,0 +1,71 @@ +import { randomUUID } from 'crypto' +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkInternalAuth } from '@/lib/auth/hybrid' +import { createSecretsManagerClient, deleteSecret } from '../utils' + +const logger = createLogger('SecretsManagerDeleteSecretAPI') + +const DeleteSecretSchema = z.object({ + region: z.string().min(1, 'AWS region is required'), + accessKeyId: z.string().min(1, 'AWS access key ID is required'), + secretAccessKey: z.string().min(1, 'AWS secret access key is required'), + secretId: z.string().min(1, 'Secret ID is required'), + recoveryWindowInDays: z.number().min(7).max(30).nullish(), + forceDelete: z.boolean().nullish(), +}) + +export async function POST(request: NextRequest) { + const requestId = randomUUID().slice(0, 8) + + const auth = await checkInternalAuth(request) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 }) + } + + try { + const body = await request.json() + const params = DeleteSecretSchema.parse(body) + + logger.info(`[${requestId}] Deleting 
secret ${params.secretId}`) + + const client = createSecretsManagerClient({ + region: params.region, + accessKeyId: params.accessKeyId, + secretAccessKey: params.secretAccessKey, + }) + + try { + const result = await deleteSecret( + client, + params.secretId, + params.recoveryWindowInDays, + params.forceDelete + ) + + const action = params.forceDelete ? 'permanently deleted' : 'scheduled for deletion' + logger.info(`[${requestId}] Secret ${action}: ${result.name}`) + + return NextResponse.json({ + message: `Secret "${result.name}" ${action}`, + ...result, + }) + } finally { + client.destroy() + } + } catch (error) { + if (error instanceof z.ZodError) { + logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors }) + return NextResponse.json( + { error: 'Invalid request data', details: error.errors }, + { status: 400 } + ) + } + + const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred' + logger.error(`[${requestId}] Failed to delete secret:`, error) + + return NextResponse.json({ error: `Failed to delete secret: ${errorMessage}` }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/tools/secrets_manager/get-secret/route.ts b/apps/sim/app/api/tools/secrets_manager/get-secret/route.ts new file mode 100644 index 00000000000..f96c81bd811 --- /dev/null +++ b/apps/sim/app/api/tools/secrets_manager/get-secret/route.ts @@ -0,0 +1,70 @@ +import { randomUUID } from 'crypto' +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkInternalAuth } from '@/lib/auth/hybrid' +import { createSecretsManagerClient, getSecretValue } from '../utils' + +const logger = createLogger('SecretsManagerGetSecretAPI') + +const GetSecretSchema = z.object({ + region: z.string().min(1, 'AWS region is required'), + accessKeyId: z.string().min(1, 'AWS access key ID is required'), + secretAccessKey: z.string().min(1, 'AWS secret access key is required'), + 
secretId: z.string().min(1, 'Secret ID is required'), + versionId: z.string().nullish(), + versionStage: z.string().nullish(), +}) + +export async function POST(request: NextRequest) { + const requestId = randomUUID().slice(0, 8) + + const auth = await checkInternalAuth(request) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 }) + } + + try { + const body = await request.json() + const params = GetSecretSchema.parse(body) + + logger.info(`[${requestId}] Retrieving secret ${params.secretId}`) + + const client = createSecretsManagerClient({ + region: params.region, + accessKeyId: params.accessKeyId, + secretAccessKey: params.secretAccessKey, + }) + + try { + const result = await getSecretValue( + client, + params.secretId, + params.versionId, + params.versionStage + ) + + logger.info(`[${requestId}] Secret retrieved successfully`) + + return NextResponse.json(result) + } finally { + client.destroy() + } + } catch (error) { + if (error instanceof z.ZodError) { + logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors }) + return NextResponse.json( + { error: 'Invalid request data', details: error.errors }, + { status: 400 } + ) + } + + const errorMessage = error instanceof Error ? 
error.message : 'Unknown error occurred' + logger.error(`[${requestId}] Failed to retrieve secret:`, error) + + return NextResponse.json( + { error: `Failed to retrieve secret: ${errorMessage}` }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/tools/secrets_manager/list-secrets/route.ts b/apps/sim/app/api/tools/secrets_manager/list-secrets/route.ts new file mode 100644 index 00000000000..00b0e68e591 --- /dev/null +++ b/apps/sim/app/api/tools/secrets_manager/list-secrets/route.ts @@ -0,0 +1,61 @@ +import { randomUUID } from 'crypto' +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkInternalAuth } from '@/lib/auth/hybrid' +import { createSecretsManagerClient, listSecrets } from '../utils' + +const logger = createLogger('SecretsManagerListSecretsAPI') + +const ListSecretsSchema = z.object({ + region: z.string().min(1, 'AWS region is required'), + accessKeyId: z.string().min(1, 'AWS access key ID is required'), + secretAccessKey: z.string().min(1, 'AWS secret access key is required'), + maxResults: z.number().min(1).max(100).nullish(), + nextToken: z.string().nullish(), +}) + +export async function POST(request: NextRequest) { + const requestId = randomUUID().slice(0, 8) + + const auth = await checkInternalAuth(request) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 }) + } + + try { + const body = await request.json() + const params = ListSecretsSchema.parse(body) + + logger.info(`[${requestId}] Listing secrets`) + + const client = createSecretsManagerClient({ + region: params.region, + accessKeyId: params.accessKeyId, + secretAccessKey: params.secretAccessKey, + }) + + try { + const result = await listSecrets(client, params.maxResults, params.nextToken) + + logger.info(`[${requestId}] Listed ${result.count} secrets`) + + return NextResponse.json(result) + } finally { + client.destroy() + } 
+ } catch (error) { + if (error instanceof z.ZodError) { + logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors }) + return NextResponse.json( + { error: 'Invalid request data', details: error.errors }, + { status: 400 } + ) + } + + const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred' + logger.error(`[${requestId}] Failed to list secrets:`, error) + + return NextResponse.json({ error: `Failed to list secrets: ${errorMessage}` }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/tools/secrets_manager/update-secret/route.ts b/apps/sim/app/api/tools/secrets_manager/update-secret/route.ts new file mode 100644 index 00000000000..e82f2aedda3 --- /dev/null +++ b/apps/sim/app/api/tools/secrets_manager/update-secret/route.ts @@ -0,0 +1,70 @@ +import { randomUUID } from 'crypto' +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkInternalAuth } from '@/lib/auth/hybrid' +import { createSecretsManagerClient, updateSecretValue } from '../utils' + +const logger = createLogger('SecretsManagerUpdateSecretAPI') + +const UpdateSecretSchema = z.object({ + region: z.string().min(1, 'AWS region is required'), + accessKeyId: z.string().min(1, 'AWS access key ID is required'), + secretAccessKey: z.string().min(1, 'AWS secret access key is required'), + secretId: z.string().min(1, 'Secret ID is required'), + secretValue: z.string().min(1, 'Secret value is required'), + description: z.string().nullish(), +}) + +export async function POST(request: NextRequest) { + const requestId = randomUUID().slice(0, 8) + + const auth = await checkInternalAuth(request) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 }) + } + + try { + const body = await request.json() + const params = UpdateSecretSchema.parse(body) + + logger.info(`[${requestId}] Updating secret ${params.secretId}`) 
+ + const client = createSecretsManagerClient({ + region: params.region, + accessKeyId: params.accessKeyId, + secretAccessKey: params.secretAccessKey, + }) + + try { + const result = await updateSecretValue( + client, + params.secretId, + params.secretValue, + params.description + ) + + logger.info(`[${requestId}] Secret updated: ${result.name}`) + + return NextResponse.json({ + message: `Secret "${result.name}" updated successfully`, + ...result, + }) + } finally { + client.destroy() + } + } catch (error) { + if (error instanceof z.ZodError) { + logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors }) + return NextResponse.json( + { error: 'Invalid request data', details: error.errors }, + { status: 400 } + ) + } + + const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred' + logger.error(`[${requestId}] Failed to update secret:`, error) + + return NextResponse.json({ error: `Failed to update secret: ${errorMessage}` }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/tools/secrets_manager/utils.ts b/apps/sim/app/api/tools/secrets_manager/utils.ts new file mode 100644 index 00000000000..331197fceca --- /dev/null +++ b/apps/sim/app/api/tools/secrets_manager/utils.ts @@ -0,0 +1,140 @@ +import { + CreateSecretCommand, + DeleteSecretCommand, + GetSecretValueCommand, + ListSecretsCommand, + SecretsManagerClient, + UpdateSecretCommand, +} from '@aws-sdk/client-secrets-manager' +import type { SecretsManagerConnectionConfig } from '@/tools/secrets_manager/types' + +export function createSecretsManagerClient( + config: SecretsManagerConnectionConfig +): SecretsManagerClient { + return new SecretsManagerClient({ + region: config.region, + credentials: { + accessKeyId: config.accessKeyId, + secretAccessKey: config.secretAccessKey, + }, + }) +} + +export async function getSecretValue( + client: SecretsManagerClient, + secretId: string, + versionId?: string | null, + versionStage?: string | null +) { + const command = new 
GetSecretValueCommand({ + SecretId: secretId, + ...(versionId ? { VersionId: versionId } : {}), + ...(versionStage ? { VersionStage: versionStage } : {}), + }) + + const response = await client.send(command) + + if (!response.SecretString && response.SecretBinary) { + throw new Error( + 'Secret is stored as binary (SecretBinary). This integration only supports string secrets.' + ) + } + + return { + name: response.Name ?? '', + secretValue: response.SecretString ?? '', + arn: response.ARN ?? '', + versionId: response.VersionId ?? '', + versionStages: response.VersionStages ?? [], + createdDate: response.CreatedDate?.toISOString() ?? null, + } +} + +export async function listSecrets( + client: SecretsManagerClient, + maxResults?: number | null, + nextToken?: string | null +) { + const command = new ListSecretsCommand({ + ...(maxResults ? { MaxResults: maxResults } : {}), + ...(nextToken ? { NextToken: nextToken } : {}), + }) + + const response = await client.send(command) + const secrets = (response.SecretList ?? []).map((secret) => ({ + name: secret.Name ?? '', + arn: secret.ARN ?? '', + description: secret.Description ?? null, + createdDate: secret.CreatedDate?.toISOString() ?? null, + lastChangedDate: secret.LastChangedDate?.toISOString() ?? null, + lastAccessedDate: secret.LastAccessedDate?.toISOString() ?? null, + rotationEnabled: secret.RotationEnabled ?? false, + tags: secret.Tags?.map((t) => ({ key: t.Key ?? '', value: t.Value ?? '' })) ?? [], + })) + + return { + secrets, + nextToken: response.NextToken ?? null, + count: secrets.length, + } +} + +export async function createSecret( + client: SecretsManagerClient, + name: string, + secretValue: string, + description?: string | null +) { + const command = new CreateSecretCommand({ + Name: name, + SecretString: secretValue, + ...(description ? { Description: description } : {}), + }) + + const response = await client.send(command) + return { + name: response.Name ?? '', + arn: response.ARN ?? 
'', + versionId: response.VersionId ?? '', + } +} + +export async function updateSecretValue( + client: SecretsManagerClient, + secretId: string, + secretValue: string, + description?: string | null +) { + const command = new UpdateSecretCommand({ + SecretId: secretId, + SecretString: secretValue, + ...(description ? { Description: description } : {}), + }) + + const response = await client.send(command) + return { + name: response.Name ?? '', + arn: response.ARN ?? '', + versionId: response.VersionId ?? '', + } +} + +export async function deleteSecret( + client: SecretsManagerClient, + secretId: string, + recoveryWindowInDays?: number | null, + forceDelete?: boolean | null +) { + const command = new DeleteSecretCommand({ + SecretId: secretId, + ...(forceDelete ? { ForceDeleteWithoutRecovery: true } : {}), + ...(!forceDelete && recoveryWindowInDays ? { RecoveryWindowInDays: recoveryWindowInDays } : {}), + }) + + const response = await client.send(command) + return { + name: response.Name ?? '', + arn: response.ARN ?? '', + deletionDate: response.DeletionDate?.toISOString() ?? null, + } +} diff --git a/apps/sim/app/api/v1/knowledge/[id]/documents/route.ts b/apps/sim/app/api/v1/knowledge/[id]/documents/route.ts index 193111ee20c..7310a4eca98 100644 --- a/apps/sim/app/api/v1/knowledge/[id]/documents/route.ts +++ b/apps/sim/app/api/v1/knowledge/[id]/documents/route.ts @@ -187,8 +187,6 @@ export async function POST(request: NextRequest, { params }: DocumentsRouteParam requestId ) - const chunkingConfig = result.kb.chunkingConfig ?? 
{ maxSize: 1024, minSize: 100, overlap: 200 } - const documentData: DocumentData = { documentId: newDocument.id, filename: file.name, @@ -197,18 +195,7 @@ export async function POST(request: NextRequest, { params }: DocumentsRouteParam mimeType: contentType, } - processDocumentsWithQueue( - [documentData], - knowledgeBaseId, - { - chunkSize: chunkingConfig.maxSize, - minCharactersPerChunk: chunkingConfig.minSize, - chunkOverlap: chunkingConfig.overlap, - recipe: 'default', - lang: 'en', - }, - requestId - ).catch(() => { + processDocumentsWithQueue([documentData], knowledgeBaseId, {}, requestId).catch(() => { // Processing errors are logged internally }) diff --git a/apps/sim/app/api/webhooks/agentmail/route.ts b/apps/sim/app/api/webhooks/agentmail/route.ts index c23a2f551c9..7486a24556e 100644 --- a/apps/sim/app/api/webhooks/agentmail/route.ts +++ b/apps/sim/app/api/webhooks/agentmail/route.ts @@ -162,7 +162,13 @@ export async function POST(req: Request) { if (isTriggerDevEnabled) { try { - const handle = await tasks.trigger('mothership-inbox-execution', { taskId }) + const handle = await tasks.trigger( + 'mothership-inbox-execution', + { taskId }, + { + tags: [`workspaceId:${result.id}`, `taskId:${taskId}`], + } + ) await db .update(mothershipInboxTask) .set({ triggerJobId: handle.id }) diff --git a/apps/sim/app/api/workflows/[id]/execute/route.ts b/apps/sim/app/api/workflows/[id]/execute/route.ts index 86522989004..37e87cb4de2 100644 --- a/apps/sim/app/api/workflows/[id]/execute/route.ts +++ b/apps/sim/app/api/workflows/[id]/execute/route.ts @@ -237,7 +237,7 @@ async function handleAsyncExecution(params: AsyncExecutionParams): Promise -} - -export default async function FileDetailPage({ params }: FileDetailPageProps) { - const { workspaceId } = await params - const session = await getSession() - - if (!session?.user?.id) { - redirect('/') - } - - const hasPermission = await verifyWorkspaceMembership(session.user.id, workspaceId) - if (!hasPermission) { - 
redirect('/') - } - - const permissionConfig = await getUserPermissionConfig(session.user.id) - if (permissionConfig?.hideFilesTab) { - redirect(`/workspace/${workspaceId}`) - } - - return -} +export default Files diff --git a/apps/sim/app/workspace/[workspaceId]/files/[fileId]/view/file-viewer.tsx b/apps/sim/app/workspace/[workspaceId]/files/[fileId]/view/file-viewer.tsx index b939d50898d..a450bd374da 100644 --- a/apps/sim/app/workspace/[workspaceId]/files/[fileId]/view/file-viewer.tsx +++ b/apps/sim/app/workspace/[workspaceId]/files/[fileId]/view/file-viewer.tsx @@ -1,15 +1,22 @@ 'use client' import { createLogger } from '@sim/logger' -import type { WorkspaceFileRecord } from '@/lib/uploads/contexts/workspace' +import { useParams } from 'next/navigation' +import { useWorkspaceFileRecord } from '@/hooks/queries/workspace-files' const logger = createLogger('FileViewer') -interface FileViewerProps { - file: WorkspaceFileRecord -} +export function FileViewer() { + const params = useParams() + const workspaceId = params?.workspaceId as string + const fileId = params?.fileId as string + + const { data: file, isLoading } = useWorkspaceFileRecord(workspaceId, fileId) + + if (isLoading || !file) { + return null + } -export function FileViewer({ file }: FileViewerProps) { const serveUrl = `/api/files/serve/${encodeURIComponent(file.key)}?context=workspace` return ( @@ -18,7 +25,7 @@ export function FileViewer({ file }: FileViewerProps) { src={serveUrl} className='h-full w-full border-0' title={file.name} - onError={(e) => { + onError={() => { logger.error(`Failed to load file: ${file.name}`) }} /> diff --git a/apps/sim/app/workspace/[workspaceId]/files/[fileId]/view/page.tsx b/apps/sim/app/workspace/[workspaceId]/files/[fileId]/view/page.tsx index dc8a2246bee..92aa310135d 100644 --- a/apps/sim/app/workspace/[workspaceId]/files/[fileId]/view/page.tsx +++ b/apps/sim/app/workspace/[workspaceId]/files/[fileId]/view/page.tsx @@ -1,46 +1,9 @@ import type { Metadata } from 'next' 
-import { redirect, unstable_rethrow } from 'next/navigation' -import { getSession } from '@/lib/auth' -import { getWorkspaceFile } from '@/lib/uploads/contexts/workspace' -import { verifyWorkspaceMembership } from '@/app/api/workflows/utils' -import { FileViewer } from '@/app/workspace/[workspaceId]/files/[fileId]/view/file-viewer' +import { FileViewer } from './file-viewer' export const metadata: Metadata = { title: 'File', robots: { index: false }, } -interface FileViewerPageProps { - params: Promise<{ - workspaceId: string - fileId: string - }> -} - -export default async function FileViewerPage({ params }: FileViewerPageProps) { - const { workspaceId, fileId } = await params - - const session = await getSession() - if (!session?.user?.id) { - redirect('/') - } - - const hasPermission = await verifyWorkspaceMembership(session.user.id, workspaceId) - if (!hasPermission) { - redirect(`/workspace/${workspaceId}`) - } - - let fileRecord: Awaited> - try { - fileRecord = await getWorkspaceFile(workspaceId, fileId) - } catch (error) { - unstable_rethrow(error) - redirect(`/workspace/${workspaceId}`) - } - - if (!fileRecord) { - redirect(`/workspace/${workspaceId}`) - } - - return -} +export default FileViewer diff --git a/apps/sim/app/workspace/[workspaceId]/files/files.tsx b/apps/sim/app/workspace/[workspaceId]/files/files.tsx index 16be4f3e8cd..a4c82e1672e 100644 --- a/apps/sim/app/workspace/[workspaceId]/files/files.tsx +++ b/apps/sim/app/workspace/[workspaceId]/files/files.tsx @@ -75,6 +75,7 @@ import { } from '@/hooks/queries/workspace-files' import { useDebounce } from '@/hooks/use-debounce' import { useInlineRename } from '@/hooks/use-inline-rename' +import { usePermissionConfig } from '@/hooks/use-permission-config' type SaveStatus = 'idle' | 'saving' | 'saved' | 'error' @@ -136,6 +137,13 @@ export function Files() { const fileIdFromRoute = typeof params?.fileId === 'string' && params.fileId.length > 0 ? 
params.fileId : null const userPermissions = useUserPermissionsContext() + const { config: permissionConfig } = usePermissionConfig() + + useEffect(() => { + if (permissionConfig.hideFilesTab) { + router.replace(`/workspace/${workspaceId}`) + } + }, [permissionConfig.hideFilesTab, router, workspaceId]) const { data: files = [], isLoading, error } = useWorkspaceFiles(workspaceId) const { data: members } = useWorkspaceMembersQuery(workspaceId) diff --git a/apps/sim/app/workspace/[workspaceId]/files/page.tsx b/apps/sim/app/workspace/[workspaceId]/files/page.tsx index 31204696522..43db21afa32 100644 --- a/apps/sim/app/workspace/[workspaceId]/files/page.tsx +++ b/apps/sim/app/workspace/[workspaceId]/files/page.tsx @@ -1,8 +1,4 @@ import type { Metadata } from 'next' -import { redirect } from 'next/navigation' -import { getSession } from '@/lib/auth' -import { verifyWorkspaceMembership } from '@/app/api/workflows/utils' -import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check' import { Files } from './files' export const metadata: Metadata = { @@ -10,29 +6,4 @@ export const metadata: Metadata = { robots: { index: false }, } -interface FilesPageProps { - params: Promise<{ - workspaceId: string - }> -} - -export default async function FilesPage({ params }: FilesPageProps) { - const { workspaceId } = await params - const session = await getSession() - - if (!session?.user?.id) { - redirect('/') - } - - const hasPermission = await verifyWorkspaceMembership(session.user.id, workspaceId) - if (!hasPermission) { - redirect('/') - } - - const permissionConfig = await getUserPermissionConfig(session.user.id) - if (permissionConfig?.hideFilesTab) { - redirect(`/workspace/${workspaceId}`) - } - - return -} +export default Files diff --git a/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/components/add-resource-dropdown/add-resource-dropdown.tsx 
b/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/components/add-resource-dropdown/add-resource-dropdown.tsx index bd0cf8cc792..821d6c47242 100644 --- a/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/components/add-resource-dropdown/add-resource-dropdown.tsx +++ b/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/components/add-resource-dropdown/add-resource-dropdown.tsx @@ -47,7 +47,7 @@ export function useAvailableResources( workspaceId: string, existingKeys: Set ): AvailableItemsByType[] { - const { data: workflows = [] } = useWorkflows(workspaceId, { syncRegistry: false }) + const { data: workflows = [] } = useWorkflows(workspaceId) const { data: tables = [] } = useTablesList(workspaceId) const { data: files = [] } = useWorkspaceFiles(workspaceId) const { data: knowledgeBases } = useKnowledgeBasesQuery(workspaceId) diff --git a/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/components/resource-content/resource-content.tsx b/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/components/resource-content/resource-content.tsx index 6e5913caa1e..3ec33a67366 100644 --- a/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/components/resource-content/resource-content.tsx +++ b/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/components/resource-content/resource-content.tsx @@ -37,6 +37,7 @@ import { import { Table } from '@/app/workspace/[workspaceId]/tables/[tableId]/components' import { useUsageLimits } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/hooks' import { useWorkflowExecution } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution' +import { useWorkflows } from '@/hooks/queries/workflows' import { useWorkspaceFiles } from '@/hooks/queries/workspace-files' import { useSettingsNavigation } from '@/hooks/use-settings-navigation' import { useExecutionStore } from 
'@/stores/execution/store' @@ -375,15 +376,16 @@ interface EmbeddedWorkflowProps { } function EmbeddedWorkflow({ workspaceId, workflowId }: EmbeddedWorkflowProps) { - const workflowExists = useWorkflowRegistry((state) => Boolean(state.workflows[workflowId])) - const isMetadataLoaded = useWorkflowRegistry( - (state) => state.hydration.phase !== 'idle' && state.hydration.phase !== 'metadata-loading' + const { data: workflowList, isPending: isWorkflowsPending } = useWorkflows(workspaceId) + const workflowExists = useMemo( + () => (workflowList ?? []).some((w) => w.id === workflowId), + [workflowList, workflowId] ) const hasLoadError = useWorkflowRegistry( (state) => state.hydration.phase === 'error' && state.hydration.workflowId === workflowId ) - if (!isMetadataLoaded) return LOADING_SKELETON + if (isWorkflowsPending) return LOADING_SKELETON if (!workflowExists || hasLoadError) { return ( diff --git a/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/components/resource-registry/resource-registry.tsx b/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/components/resource-registry/resource-registry.tsx index 4b545dc298b..5c8bd184cf5 100644 --- a/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/components/resource-registry/resource-registry.tsx +++ b/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/components/resource-registry/resource-registry.tsx @@ -1,7 +1,8 @@ 'use client' -import type { ElementType, ReactNode } from 'react' +import { type ElementType, type ReactNode, useMemo } from 'react' import type { QueryClient } from '@tanstack/react-query' +import { useParams } from 'next/navigation' import { Database, File as FileIcon, @@ -17,9 +18,9 @@ import type { } from '@/app/workspace/[workspaceId]/home/types' import { knowledgeKeys } from '@/hooks/queries/kb/knowledge' import { tableKeys } from '@/hooks/queries/tables' -import { workflowKeys } from '@/hooks/queries/workflows' +import { 
invalidateWorkflowLists } from '@/hooks/queries/utils/invalidate-workflow-lists' +import { useWorkflows } from '@/hooks/queries/workflows' import { workspaceFilesKeys } from '@/hooks/queries/workspace-files' -import { useWorkflowRegistry } from '@/stores/workflows/registry/store' interface DropdownItemRenderProps { item: { id: string; name: string; [key: string]: unknown } @@ -34,7 +35,12 @@ export interface ResourceTypeConfig { } function WorkflowTabSquare({ workflowId, className }: { workflowId: string; className?: string }) { - const color = useWorkflowRegistry((state) => state.workflows[workflowId]?.color ?? '#888') + const { workspaceId } = useParams<{ workspaceId: string }>() + const { data: workflowList } = useWorkflows(workspaceId) + const color = useMemo(() => { + const wf = (workflowList ?? []).find((w) => w.id === workflowId) + return wf?.color ?? '#888' + }, [workflowList, workflowId]) return (
{ - qc.invalidateQueries({ queryKey: workflowKeys.lists() }) + workflow: (qc, wId) => { + void invalidateWorkflowLists(qc, wId) }, knowledgebase: (qc, _wId, id) => { qc.invalidateQueries({ queryKey: knowledgeKeys.lists() }) diff --git a/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/components/resource-tabs/resource-tabs.tsx b/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/components/resource-tabs/resource-tabs.tsx index 20e132089c9..c630d50b5e9 100644 --- a/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/components/resource-tabs/resource-tabs.tsx +++ b/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/components/resource-tabs/resource-tabs.tsx @@ -53,7 +53,7 @@ const PREVIEW_MODE_LABELS: Record = { * tabs always reflect the latest name even after a rename. */ function useResourceNameLookup(workspaceId: string): Map { - const { data: workflows = [] } = useWorkflows(workspaceId, { syncRegistry: false }) + const { data: workflows = [] } = useWorkflows(workspaceId) const { data: tables = [] } = useTablesList(workspaceId) const { data: files = [] } = useWorkspaceFiles(workspaceId) const { data: knowledgeBases } = useKnowledgeBasesQuery(workspaceId) diff --git a/apps/sim/app/workspace/[workspaceId]/home/components/user-input/user-input.tsx b/apps/sim/app/workspace/[workspaceId]/home/components/user-input/user-input.tsx index 2735afc993e..c44471f019a 100644 --- a/apps/sim/app/workspace/[workspaceId]/home/components/user-input/user-input.tsx +++ b/apps/sim/app/workspace/[workspaceId]/home/components/user-input/user-input.tsx @@ -45,8 +45,8 @@ import { computeMentionHighlightRanges, extractContextTokens, } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/utils' +import { useWorkflowMap } from '@/hooks/queries/workflows' import type { ChatContext } from '@/stores/panel' -import { useWorkflowRegistry } from 
'@/stores/workflows/registry/store' export type { FileAttachmentForApi } from '@/app/workspace/[workspaceId]/home/types' @@ -122,6 +122,7 @@ export function UserInput({ onContextAdd, }: UserInputProps) { const { workspaceId } = useParams<{ workspaceId: string }>() + const { data: workflowsById = {} } = useWorkflowMap(workspaceId) const { data: session } = useSession() const [value, setValue] = useState(defaultValue) const overlayRef = useRef(null) @@ -617,7 +618,6 @@ export function UserInput({ const elements: React.ReactNode[] = [] let lastIndex = 0 - for (let i = 0; i < ranges.length; i++) { const range = ranges[i] @@ -639,7 +639,7 @@ export function UserInput({ case 'workflow': case 'current_workflow': { const wfId = (matchingCtx as { workflowId: string }).workflowId - const wfColor = useWorkflowRegistry.getState().workflows[wfId]?.color ?? '#888' + const wfColor = workflowsById[wfId]?.color ?? '#888' mentionIconNode = (
0 ? elements : {'\u00A0'} - }, [value, contextManagement.selectedContexts]) + }, [value, contextManagement.selectedContexts, workflowsById]) return (
{ - if (context.kind === 'workflow' || context.kind === 'current_workflow') { - return state.workflows[context.workflowId || '']?.color ?? null - } - return null - }) + const { workspaceId } = useParams<{ workspaceId: string }>() + const { data: workflowList } = useWorkflows(workspaceId) + const workflowColor = useMemo(() => { + if (context.kind !== 'workflow' && context.kind !== 'current_workflow') return null + return (workflowList ?? []).find((w) => w.id === context.workflowId)?.color ?? null + }, [workflowList, context.kind, context.workflowId]) let icon: React.ReactNode = null const iconClasses = 'h-[12px] w-[12px] flex-shrink-0 text-[var(--text-icon)]' diff --git a/apps/sim/app/workspace/[workspaceId]/home/hooks/use-chat.ts b/apps/sim/app/workspace/[workspaceId]/home/hooks/use-chat.ts index 7b5c1cda635..5fa965d9abe 100644 --- a/apps/sim/app/workspace/[workspaceId]/home/hooks/use-chat.ts +++ b/apps/sim/app/workspace/[workspaceId]/home/hooks/use-chat.ts @@ -21,7 +21,23 @@ import { import { VFS_DIR_TO_RESOURCE } from '@/lib/copilot/resource-types' import { isWorkflowToolName } from '@/lib/copilot/workflow-tools' import { getNextWorkflowColor } from '@/lib/workflows/colors' +import { getQueryClient } from '@/app/_shell/providers/get-query-client' import { invalidateResourceQueries } from '@/app/workspace/[workspaceId]/home/components/mothership-view/components/resource-registry' +import type { + ChatMessage, + ChatMessageAttachment, + ContentBlock, + ContentBlockType, + FileAttachmentForApi, + GenericResourceData, + GenericResourceEntry, + MothershipResource, + MothershipResourceType, + QueuedMessage, + SSEPayload, + SSEPayloadData, + ToolCallStatus, +} from '@/app/workspace/[workspaceId]/home/types' import { deploymentKeys } from '@/hooks/queries/deployments' import { fetchChatHistory, @@ -34,29 +50,17 @@ import { taskKeys, useChatHistory, } from '@/hooks/queries/tasks' +import { getFolderMap } from '@/hooks/queries/utils/folder-cache' +import { 
invalidateWorkflowSelectors } from '@/hooks/queries/utils/invalidate-workflow-lists' import { getTopInsertionSortOrder } from '@/hooks/queries/utils/top-insertion-sort-order' +import { getWorkflowById, getWorkflows } from '@/hooks/queries/utils/workflow-cache' import { workflowKeys } from '@/hooks/queries/workflows' import { useExecutionStream } from '@/hooks/use-execution-stream' import { useExecutionStore } from '@/stores/execution/store' -import { useFolderStore } from '@/stores/folders/store' import type { ChatContext } from '@/stores/panel' import { consolePersistence, useTerminalConsoleStore } from '@/stores/terminal' import { useWorkflowRegistry } from '@/stores/workflows/registry/store' -import type { - ChatMessage, - ChatMessageAttachment, - ContentBlock, - ContentBlockType, - FileAttachmentForApi, - GenericResourceData, - GenericResourceEntry, - MothershipResource, - MothershipResourceType, - QueuedMessage, - SSEPayload, - SSEPayloadData, - ToolCallStatus, -} from '../types' +import type { WorkflowMetadata } from '@/stores/workflows/registry/types' export interface UseChatReturn { messages: ChatMessage[] @@ -301,31 +305,37 @@ function getPayloadData(payload: SSEPayload): SSEPayloadData | undefined { return typeof payload.data === 'object' ? payload.data : undefined } -/** Adds a workflow to the registry with a top-insertion sort order if it doesn't already exist. */ +/** Adds a workflow to the React Query cache with a top-insertion sort order if it doesn't already exist. 
*/ function ensureWorkflowInRegistry(resourceId: string, title: string, workspaceId: string): boolean { - const registry = useWorkflowRegistry.getState() - if (registry.workflows[resourceId]) return false + const workflows = getWorkflows(workspaceId) + if (workflows.some((w) => w.id === resourceId)) return false const sortOrder = getTopInsertionSortOrder( - registry.workflows, - useFolderStore.getState().folders, + Object.fromEntries(workflows.map((w) => [w.id, w])), + getFolderMap(workspaceId), workspaceId, null ) - useWorkflowRegistry.setState((state) => ({ - workflows: { - ...state.workflows, - [resourceId]: { - id: resourceId, - name: title, - lastModified: new Date(), - createdAt: new Date(), - color: getNextWorkflowColor(), - workspaceId, - folderId: null, - sortOrder, - }, - }, - })) + const newMetadata: WorkflowMetadata = { + id: resourceId, + name: title, + lastModified: new Date(), + createdAt: new Date(), + color: getNextWorkflowColor(), + workspaceId, + folderId: null, + sortOrder, + } + const queryClient = getQueryClient() + const key = workflowKeys.list(workspaceId, 'active') + queryClient.setQueryData(key, (current) => { + const next = current ?? workflows + if (next.some((workflow) => workflow.id === resourceId)) { + return next + } + + return [...next, newMetadata] + }) + void invalidateWorkflowSelectors(queryClient, workspaceId) return true } @@ -1253,7 +1263,7 @@ export function useChat( ? 
((args as Record).workflowId as string) : useWorkflowRegistry.getState().activeWorkflowId if (targetWorkflowId) { - const meta = useWorkflowRegistry.getState().workflows[targetWorkflowId] + const meta = getWorkflowById(workspaceId, targetWorkflowId) const wasAdded = addResource({ type: 'workflow', id: targetWorkflowId, diff --git a/apps/sim/app/workspace/[workspaceId]/home/page.tsx b/apps/sim/app/workspace/[workspaceId]/home/page.tsx index 659b6e6865e..f56a5d2124a 100644 --- a/apps/sim/app/workspace/[workspaceId]/home/page.tsx +++ b/apps/sim/app/workspace/[workspaceId]/home/page.tsx @@ -1,31 +1,8 @@ import type { Metadata } from 'next' -import { redirect } from 'next/navigation' -import { getSession } from '@/lib/auth' -import { verifyWorkspaceMembership } from '@/app/api/workflows/utils' import { Home } from './home' export const metadata: Metadata = { title: 'Home', } -interface HomePageProps { - params: Promise<{ - workspaceId: string - }> -} - -export default async function HomePage({ params }: HomePageProps) { - const { workspaceId } = await params - const session = await getSession() - - if (!session?.user?.id) { - redirect('/') - } - - const hasPermission = await verifyWorkspaceMembership(session.user.id, workspaceId) - if (!hasPermission) { - redirect('/') - } - - return -} +export default Home diff --git a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/chunk-editor/chunk-editor.tsx b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/chunk-editor/chunk-editor.tsx index 48133841228..c646fdf15f9 100644 --- a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/chunk-editor/chunk-editor.tsx +++ b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/chunk-editor/chunk-editor.tsx @@ -56,22 +56,11 @@ export function ChunkEditor({ const [savedContent, setSavedContent] = useState(chunkContent) const [tokenizerOn, setTokenizerOn] = useState(false) const 
[hoveredTokenIndex, setHoveredTokenIndex] = useState(null) - const prevChunkIdRef = useRef(chunk?.id) const savedContentRef = useRef(chunkContent) const editedContentRef = useRef(editedContent) editedContentRef.current = editedContent - useEffect(() => { - if (isCreateMode) return - if (chunk?.id !== prevChunkIdRef.current) { - prevChunkIdRef.current = chunk?.id - savedContentRef.current = chunkContent - setSavedContent(chunkContent) - setEditedContent(chunkContent) - } - }, [isCreateMode, chunk?.id, chunkContent]) - useEffect(() => { if (isCreateMode || !chunk?.id) return const controller = new AbortController() diff --git a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/document.tsx b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/document.tsx index f43c84dbdde..a7c0c48ba59 100644 --- a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/document.tsx +++ b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/document.tsx @@ -1,6 +1,6 @@ 'use client' -import { startTransition, useCallback, useEffect, useMemo, useRef, useState } from 'react' +import { useCallback, useEffect, useMemo, useRef, useState } from 'react' import { createLogger } from '@sim/logger' import { ChevronDown, ChevronUp, FileText, Pencil, Tag } from 'lucide-react' import { useParams, useRouter, useSearchParams } from 'next/navigation' @@ -47,6 +47,7 @@ import { useUpdateChunk, useUpdateDocument, } from '@/hooks/queries/kb/knowledge' +import { useDebounce } from '@/hooks/use-debounce' import { useInlineRename } from '@/hooks/use-inline-rename' const logger = createLogger('Document') @@ -152,7 +153,7 @@ export function Document({ const [showTagsModal, setShowTagsModal] = useState(false) const [searchQuery, setSearchQuery] = useState('') - const [debouncedSearchQuery, setDebouncedSearchQuery] = useState('') + const debouncedSearchQuery = useDebounce(searchQuery, 200) const [enabledFilter, setEnabledFilter] = useState([]) const [activeSort, 
setActiveSort] = useState<{ column: string @@ -168,11 +169,8 @@ export function Document({ chunks: initialChunks, currentPage: initialPage, totalPages: initialTotalPages, - hasNextPage: initialHasNextPage, - hasPrevPage: initialHasPrevPage, goToPage: initialGoToPage, error: initialError, - refreshChunks: initialRefreshChunks, updateChunk: initialUpdateChunk, isFetching: isFetchingChunks, } = useDocumentChunks( @@ -207,7 +205,9 @@ export function Document({ const [selectedChunks, setSelectedChunks] = useState>(() => new Set()) // Inline editor state - const [selectedChunkId, setSelectedChunkId] = useState(null) + const [selectedChunkId, setSelectedChunkId] = useState(() => + searchParams.get('chunk') + ) const [isCreatingNewChunk, setIsCreatingNewChunk] = useState(false) const [isDirty, setIsDirty] = useState(false) const [saveStatus, setSaveStatus] = useState('idle') @@ -217,27 +217,6 @@ export function Document({ const saveStatusRef = useRef('idle') saveStatusRef.current = saveStatus - // Auto-select chunk from URL param on mount - const initialChunkParam = useRef(searchParams.get('chunk')) - useEffect(() => { - if (initialChunkParam.current) { - setSelectedChunkId(initialChunkParam.current) - initialChunkParam.current = null - } - }, []) - - useEffect(() => { - const handler = setTimeout(() => { - startTransition(() => { - setDebouncedSearchQuery(searchQuery) - }) - }, 200) - - return () => { - clearTimeout(handler) - } - }, [searchQuery]) - const isSearching = debouncedSearchQuery.trim().length > 0 const showingSearch = isSearching && searchQuery.trim().length > 0 && searchResults.length > 0 const SEARCH_PAGE_SIZE = 50 @@ -259,8 +238,6 @@ export function Document({ const currentPage = showingSearch ? searchCurrentPage : initialPage const totalPages = showingSearch ? searchTotalPages : initialTotalPages - const hasNextPage = showingSearch ? searchCurrentPage < searchTotalPages : initialHasNextPage - const hasPrevPage = showingSearch ? 
searchCurrentPage > 1 : initialHasPrevPage // Keep refs to displayChunks and totalPages so polling callbacks can read fresh data const displayChunksRef = useRef(displayChunks) @@ -281,12 +258,11 @@ export function Document({ if (showingSearch) { return } - return await initialGoToPage(page) + return initialGoToPage(page) }, [showingSearch, initialGoToPage] ) - const refreshChunks = showingSearch ? async () => {} : initialRefreshChunks const updateChunk = showingSearch ? (_id: string, _updates: Record) => {} : initialUpdateChunk @@ -309,7 +285,6 @@ export function Document({ const { isOpen: isContextMenuOpen, position: contextMenuPosition, - menuRef, handleContextMenu: baseHandleContextMenu, closeMenu: closeContextMenu, } = useContextMenu() @@ -661,18 +636,11 @@ export function Document({ const chunk = displayChunks.find((c) => c.id === chunkId) if (!chunk) return + const newEnabled = !chunk.enabled + updateChunk(chunkId, { enabled: newEnabled }) updateChunkMutation( - { - knowledgeBaseId, - documentId, - chunkId, - enabled: !chunk.enabled, - }, - { - onSuccess: () => { - updateChunk(chunkId, { enabled: !chunk.enabled }) - }, - } + { knowledgeBaseId, documentId, chunkId, enabled: newEnabled }, + { onError: () => updateChunk(chunkId, { enabled: chunk.enabled }) } ) }, [displayChunks, knowledgeBaseId, documentId, updateChunk] diff --git a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/base.tsx b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/base.tsx index a8d6a80ca83..69150737971 100644 --- a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/base.tsx +++ b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/base.tsx @@ -62,7 +62,7 @@ import { type TagDefinition, useKnowledgeBaseTagDefinitions, } from '@/hooks/kb/use-knowledge-base-tag-definitions' -import { useConnectorList } from '@/hooks/queries/kb/connectors' +import { isConnectorSyncingOrPending, useConnectorList } from '@/hooks/queries/kb/connectors' import type { DocumentTagFilter } from 
'@/hooks/queries/kb/knowledge' import { useBulkDocumentOperation, @@ -194,7 +194,7 @@ export function KnowledgeBase({ const { removeKnowledgeBase } = useKnowledgeBasesList(workspaceId, { enabled: false }) const userPermissions = useUserPermissionsContext() - const { mutate: updateDocumentMutation } = useUpdateDocument() + const { mutate: updateDocumentMutation, mutateAsync: updateDocumentAsync } = useUpdateDocument() const { mutate: deleteDocumentMutation } = useDeleteDocument() const { mutate: deleteKnowledgeBaseMutation, isPending: isDeleting } = useDeleteKnowledgeBase(workspaceId) @@ -285,7 +285,7 @@ export function KnowledgeBase({ } = useKnowledgeBase(id) const { data: connectors = [], isLoading: isLoadingConnectors } = useConnectorList(id) - const hasSyncingConnectors = connectors.some((c) => c.status === 'syncing') + const hasSyncingConnectors = connectors.some(isConnectorSyncingOrPending) const hasSyncingConnectorsRef = useRef(hasSyncingConnectors) hasSyncingConnectorsRef.current = hasSyncingConnectors @@ -455,28 +455,16 @@ export function KnowledgeBase({ updateDocument(documentId, { filename: newName }) - return new Promise((resolve, reject) => { - updateDocumentMutation( - { - knowledgeBaseId: id, - documentId, - updates: { filename: newName }, - }, - { - onSuccess: () => { - logger.info(`Document renamed: ${documentId}`) - resolve() - }, - onError: (err) => { - if (previousName !== undefined) { - updateDocument(documentId, { filename: previousName }) - } - logger.error('Error renaming document:', err) - reject(err) - }, - } - ) - }) + try { + await updateDocumentAsync({ knowledgeBaseId: id, documentId, updates: { filename: newName } }) + logger.info(`Document renamed: ${documentId}`) + } catch (err) { + if (previousName !== undefined) { + updateDocument(documentId, { filename: previousName }) + } + logger.error('Error renaming document:', err) + throw err + } } /** diff --git 
a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/components/add-documents-modal/add-documents-modal.tsx b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/components/add-documents-modal/add-documents-modal.tsx index 531fcc3f175..5ddb7eb6a20 100644 --- a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/components/add-documents-modal/add-documents-modal.tsx +++ b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/components/add-documents-modal/add-documents-modal.tsx @@ -195,9 +195,6 @@ export function AddDocumentsModal({ try { await uploadFiles([fileToRetry], knowledgeBaseId, { - chunkSize: chunkingConfig?.maxSize || 1024, - minCharactersPerChunk: chunkingConfig?.minSize || 1, - chunkOverlap: chunkingConfig?.overlap || 200, recipe: 'default', }) removeFile(index) @@ -217,9 +214,6 @@ export function AddDocumentsModal({ try { await uploadFiles(files, knowledgeBaseId, { - chunkSize: chunkingConfig?.maxSize || 1024, - minCharactersPerChunk: chunkingConfig?.minSize || 1, - chunkOverlap: chunkingConfig?.overlap || 200, recipe: 'default', }) logger.info(`Successfully uploaded ${files.length} files`) diff --git a/apps/sim/app/workspace/[workspaceId]/knowledge/components/base-card/base-card.tsx b/apps/sim/app/workspace/[workspaceId]/knowledge/components/base-card/base-card.tsx index d956af406fa..50933913e03 100644 --- a/apps/sim/app/workspace/[workspaceId]/knowledge/components/base-card/base-card.tsx +++ b/apps/sim/app/workspace/[workspaceId]/knowledge/components/base-card/base-card.tsx @@ -20,6 +20,7 @@ interface BaseCardProps { createdAt?: string updatedAt?: string connectorTypes?: string[] + chunkingConfig?: { maxSize: number; minSize: number; overlap: number } onUpdate?: (id: string, name: string, description: string) => Promise onDelete?: (id: string) => Promise } @@ -78,6 +79,7 @@ export function BaseCard({ description, updatedAt, connectorTypes = [], + chunkingConfig, onUpdate, onDelete, }: BaseCardProps) { @@ -256,6 +258,7 @@ export function BaseCard({ 
knowledgeBaseId={id} initialName={title} initialDescription={description === 'No description provided' ? '' : description} + chunkingConfig={chunkingConfig} onSave={handleSave} /> )} diff --git a/apps/sim/app/workspace/[workspaceId]/knowledge/components/create-base-modal/create-base-modal.tsx b/apps/sim/app/workspace/[workspaceId]/knowledge/components/create-base-modal/create-base-modal.tsx index 8a89dbf93db..a4e1e44ebc9 100644 --- a/apps/sim/app/workspace/[workspaceId]/knowledge/components/create-base-modal/create-base-modal.tsx +++ b/apps/sim/app/workspace/[workspaceId]/knowledge/components/create-base-modal/create-base-modal.tsx @@ -269,9 +269,6 @@ export const CreateBaseModal = memo(function CreateBaseModal({ if (files.length > 0) { try { const uploadedFiles = await uploadFiles(files, newKnowledgeBase.id, { - chunkSize: data.maxChunkSize, - minCharactersPerChunk: data.minChunkSize, - chunkOverlap: data.overlapSize, recipe: 'default', }) @@ -358,12 +355,15 @@ export const CreateBaseModal = memo(function CreateBaseModal({
@@ -371,12 +371,15 @@ export const CreateBaseModal = memo(function CreateBaseModal({
@@ -385,12 +388,15 @@ export const CreateBaseModal = memo(function CreateBaseModal({

1 token ≈ 4 characters. Max chunk size and overlap are in tokens. diff --git a/apps/sim/app/workspace/[workspaceId]/knowledge/components/edit-knowledge-base-modal/edit-knowledge-base-modal.tsx b/apps/sim/app/workspace/[workspaceId]/knowledge/components/edit-knowledge-base-modal/edit-knowledge-base-modal.tsx index 2850bd057be..22e5a57abbd 100644 --- a/apps/sim/app/workspace/[workspaceId]/knowledge/components/edit-knowledge-base-modal/edit-knowledge-base-modal.tsx +++ b/apps/sim/app/workspace/[workspaceId]/knowledge/components/edit-knowledge-base-modal/edit-knowledge-base-modal.tsx @@ -17,6 +17,7 @@ import { Textarea, } from '@/components/emcn' import { cn } from '@/lib/core/utils/cn' +import type { ChunkingConfig } from '@/lib/knowledge/types' const logger = createLogger('EditKnowledgeBaseModal') @@ -26,6 +27,7 @@ interface EditKnowledgeBaseModalProps { knowledgeBaseId: string initialName: string initialDescription: string + chunkingConfig?: ChunkingConfig onSave: (id: string, name: string, description: string) => Promise } @@ -49,6 +51,7 @@ export const EditKnowledgeBaseModal = memo(function EditKnowledgeBaseModal({ knowledgeBaseId, initialName, initialDescription, + chunkingConfig, onSave, }: EditKnowledgeBaseModalProps) { const [isSubmitting, setIsSubmitting] = useState(false) @@ -137,6 +140,47 @@ export const EditKnowledgeBaseModal = memo(function EditKnowledgeBaseModal({

)} + + {chunkingConfig && ( +
+ +
+
+

+ Max Size +

+

+ {chunkingConfig.maxSize.toLocaleString()} + + tokens + +

+
+
+

+ Min Size +

+

+ {chunkingConfig.minSize.toLocaleString()} + + chars + +

+
+
+

+ Overlap +

+

+ {chunkingConfig.overlap.toLocaleString()} + + tokens + +

+
+
+
+ )} diff --git a/apps/sim/app/workspace/[workspaceId]/knowledge/hooks/use-knowledge-upload.ts b/apps/sim/app/workspace/[workspaceId]/knowledge/hooks/use-knowledge-upload.ts index 5dcc75ef4b7..265f3f0c7f4 100644 --- a/apps/sim/app/workspace/[workspaceId]/knowledge/hooks/use-knowledge-upload.ts +++ b/apps/sim/app/workspace/[workspaceId]/knowledge/hooks/use-knowledge-upload.ts @@ -46,9 +46,6 @@ export interface UploadError { } export interface ProcessingOptions { - chunkSize?: number - minCharactersPerChunk?: number - chunkOverlap?: number recipe?: string } @@ -1011,10 +1008,7 @@ export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) { ...file, })), processingOptions: { - chunkSize: processingOptions.chunkSize || 1024, - minCharactersPerChunk: processingOptions.minCharactersPerChunk || 1, - chunkOverlap: processingOptions.chunkOverlap || 200, - recipe: processingOptions.recipe || 'default', + recipe: processingOptions.recipe ?? 'default', lang: 'en', }, bulk: true, diff --git a/apps/sim/app/workspace/[workspaceId]/knowledge/knowledge.tsx b/apps/sim/app/workspace/[workspaceId]/knowledge/knowledge.tsx index 58bd4ceddae..fcdd8053abc 100644 --- a/apps/sim/app/workspace/[workspaceId]/knowledge/knowledge.tsx +++ b/apps/sim/app/workspace/[workspaceId]/knowledge/knowledge.tsx @@ -1,6 +1,6 @@ 'use client' -import { useCallback, useMemo, useRef, useState } from 'react' +import { useCallback, useEffect, useMemo, useRef, useState } from 'react' import { createLogger } from '@sim/logger' import { useParams, useRouter } from 'next/navigation' import type { ComboboxOption } from '@/components/emcn' @@ -33,6 +33,7 @@ import { useKnowledgeBasesList } from '@/hooks/kb/use-knowledge' import { useDeleteKnowledgeBase, useUpdateKnowledgeBase } from '@/hooks/queries/kb/knowledge' import { useWorkspaceMembersQuery } from '@/hooks/queries/workspace' import { useDebounce } from '@/hooks/use-debounce' +import { usePermissionConfig } from '@/hooks/use-permission-config' 
const logger = createLogger('Knowledge') @@ -91,6 +92,13 @@ export function Knowledge() { const router = useRouter() const workspaceId = params.workspaceId as string + const { config: permissionConfig } = usePermissionConfig() + useEffect(() => { + if (permissionConfig.hideKnowledgeBaseTab) { + router.replace(`/workspace/${workspaceId}`) + } + }, [permissionConfig.hideKnowledgeBaseTab, router, workspaceId]) + const { knowledgeBases, isLoading, error } = useKnowledgeBasesList(workspaceId) const { data: members } = useWorkspaceMembersQuery(workspaceId) @@ -594,6 +602,7 @@ export function Knowledge() { knowledgeBaseId={activeKnowledgeBase.id} initialName={activeKnowledgeBase.name} initialDescription={activeKnowledgeBase.description || ''} + chunkingConfig={activeKnowledgeBase.chunkingConfig} onSave={handleUpdateKnowledgeBase} /> )} diff --git a/apps/sim/app/workspace/[workspaceId]/knowledge/loading.tsx b/apps/sim/app/workspace/[workspaceId]/knowledge/loading.tsx index 9011fcccca6..dbfd329e93d 100644 --- a/apps/sim/app/workspace/[workspaceId]/knowledge/loading.tsx +++ b/apps/sim/app/workspace/[workspaceId]/knowledge/loading.tsx @@ -1,7 +1,7 @@ import { Skeleton } from '@/components/emcn' const SKELETON_ROW_COUNT = 5 -const COLUMN_COUNT = 6 +const COLUMN_COUNT = 7 export default function KnowledgeLoading() { return ( diff --git a/apps/sim/app/workspace/[workspaceId]/knowledge/page.tsx b/apps/sim/app/workspace/[workspaceId]/knowledge/page.tsx index d52602721a6..be3743be659 100644 --- a/apps/sim/app/workspace/[workspaceId]/knowledge/page.tsx +++ b/apps/sim/app/workspace/[workspaceId]/knowledge/page.tsx @@ -1,37 +1,8 @@ import type { Metadata } from 'next' -import { redirect } from 'next/navigation' -import { getSession } from '@/lib/auth' -import { verifyWorkspaceMembership } from '@/app/api/workflows/utils' -import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check' import { Knowledge } from './knowledge' export const metadata: Metadata = { 
title: 'Knowledge Base', } -interface KnowledgePageProps { - params: Promise<{ - workspaceId: string - }> -} - -export default async function KnowledgePage({ params }: KnowledgePageProps) { - const { workspaceId } = await params - const session = await getSession() - - if (!session?.user?.id) { - redirect('/') - } - - const hasPermission = await verifyWorkspaceMembership(session.user.id, workspaceId) - if (!hasPermission) { - redirect('/') - } - - const permissionConfig = await getUserPermissionConfig(session.user.id) - if (permissionConfig?.hideKnowledgeBaseTab) { - redirect(`/workspace/${workspaceId}`) - } - - return -} +export default Knowledge diff --git a/apps/sim/app/workspace/[workspaceId]/layout.tsx b/apps/sim/app/workspace/[workspaceId]/layout.tsx index ad7b57e437f..075d1f2d39a 100644 --- a/apps/sim/app/workspace/[workspaceId]/layout.tsx +++ b/apps/sim/app/workspace/[workspaceId]/layout.tsx @@ -5,6 +5,7 @@ import { GlobalCommandsProvider } from '@/app/workspace/[workspaceId]/providers/ import { ProviderModelsLoader } from '@/app/workspace/[workspaceId]/providers/provider-models-loader' import { SettingsLoader } from '@/app/workspace/[workspaceId]/providers/settings-loader' import { WorkspacePermissionsProvider } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider' +import { WorkspaceScopeSync } from '@/app/workspace/[workspaceId]/providers/workspace-scope-sync' import { Sidebar } from '@/app/workspace/[workspaceId]/w/components/sidebar/sidebar' export default function WorkspaceLayout({ children }: { children: React.ReactNode }) { @@ -16,6 +17,7 @@ export default function WorkspaceLayout({ children }: { children: React.ReactNod
+
diff --git a/apps/sim/app/workspace/[workspaceId]/logs/components/dashboard/components/workflows-list/workflows-list.tsx b/apps/sim/app/workspace/[workspaceId]/logs/components/dashboard/components/workflows-list/workflows-list.tsx index 70915c2b8f3..ee52e0fa0bc 100644 --- a/apps/sim/app/workspace/[workspaceId]/logs/components/dashboard/components/workflows-list/workflows-list.tsx +++ b/apps/sim/app/workspace/[workspaceId]/logs/components/dashboard/components/workflows-list/workflows-list.tsx @@ -1,10 +1,11 @@ import { memo } from 'react' +import { useParams } from 'next/navigation' import { cn } from '@/lib/core/utils/cn' import { DELETED_WORKFLOW_COLOR, DELETED_WORKFLOW_LABEL, } from '@/app/workspace/[workspaceId]/logs/utils' -import { useWorkflowRegistry } from '@/stores/workflows/registry/store' +import { useWorkflowMap } from '@/hooks/queries/workflows' import { StatusBar, type StatusBarSegment } from '..' export interface WorkflowExecutionItem { @@ -36,7 +37,8 @@ function WorkflowsListInner({ searchQuery: string segmentDurationMs: number }) { - const workflows = useWorkflowRegistry((s) => s.workflows) + const { workspaceId } = useParams<{ workspaceId: string }>() + const { data: workflows = {} } = useWorkflowMap(workspaceId) return (
diff --git a/apps/sim/app/workspace/[workspaceId]/logs/components/dashboard/dashboard.tsx b/apps/sim/app/workspace/[workspaceId]/logs/components/dashboard/dashboard.tsx index e19df1fc194..f0ff6ca54b7 100644 --- a/apps/sim/app/workspace/[workspaceId]/logs/components/dashboard/dashboard.tsx +++ b/apps/sim/app/workspace/[workspaceId]/logs/components/dashboard/dashboard.tsx @@ -2,12 +2,13 @@ import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react' import { Loader2 } from 'lucide-react' +import { useParams } from 'next/navigation' import { useShallow } from 'zustand/react/shallow' import { Skeleton } from '@/components/emcn' import { formatLatency } from '@/app/workspace/[workspaceId]/logs/utils' import type { DashboardStatsResponse, WorkflowStats } from '@/hooks/queries/logs' +import { useWorkflows } from '@/hooks/queries/workflows' import { useFilterStore } from '@/stores/logs/filters/store' -import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import { LineChart, WorkflowsList } from './components' interface WorkflowExecution { @@ -156,7 +157,8 @@ function DashboardInner({ stats, isLoading, error }: DashboardProps) { })) ) - const allWorkflows = useWorkflowRegistry((state) => state.workflows) + const { workspaceId } = useParams<{ workspaceId: string }>() + const { data: allWorkflowList = [], isPending: isWorkflowsPending } = useWorkflows(workspaceId) const expandedWorkflowId = workflowIds.length === 1 ? workflowIds[0] : null @@ -459,7 +461,7 @@ function DashboardInner({ stats, isLoading, error }: DashboardProps) { ) } - if (Object.keys(allWorkflows).length === 0) { + if (!isWorkflowsPending && allWorkflowList.length === 0) { return (
diff --git a/apps/sim/app/workspace/[workspaceId]/logs/components/logs-toolbar/components/notifications/components/workflow-selector/workflow-selector.tsx b/apps/sim/app/workspace/[workspaceId]/logs/components/logs-toolbar/components/notifications/components/workflow-selector/workflow-selector.tsx index 02c3173bd83..f0172a1ee17 100644 --- a/apps/sim/app/workspace/[workspaceId]/logs/components/logs-toolbar/components/notifications/components/workflow-selector/workflow-selector.tsx +++ b/apps/sim/app/workspace/[workspaceId]/logs/components/logs-toolbar/components/notifications/components/workflow-selector/workflow-selector.tsx @@ -25,9 +25,7 @@ export function WorkflowSelector({ onChange, error, }: WorkflowSelectorProps) { - const { data: workflows = [], isPending: isLoading } = useWorkflows(workspaceId, { - syncRegistry: false, - }) + const { data: workflows = [], isPending: isLoading } = useWorkflows(workspaceId) const options: ComboboxOption[] = useMemo(() => { return workflows.map((w) => ({ diff --git a/apps/sim/app/workspace/[workspaceId]/logs/components/logs-toolbar/components/search/search.tsx b/apps/sim/app/workspace/[workspaceId]/logs/components/logs-toolbar/components/search/search.tsx index cb8b795276b..b895f447b57 100644 --- a/apps/sim/app/workspace/[workspaceId]/logs/components/logs-toolbar/components/search/search.tsx +++ b/apps/sim/app/workspace/[workspaceId]/logs/components/logs-toolbar/components/search/search.tsx @@ -3,6 +3,7 @@ import { useEffect, useMemo, useRef, useState } from 'react' import * as DropdownMenuPrimitive from '@radix-ui/react-dropdown-menu' import { Search, X } from 'lucide-react' +import { useParams } from 'next/navigation' import { Badge } from '@/components/emcn' import { cn } from '@/lib/core/utils/cn' import { getTriggerOptions } from '@/lib/logs/get-trigger-options' @@ -14,8 +15,8 @@ import { type WorkflowData, } from '@/lib/logs/search-suggestions' import { useSearchState } from 
'@/app/workspace/[workspaceId]/logs/hooks/use-search-state' -import { useFolderStore } from '@/stores/folders/store' -import { useWorkflowRegistry } from '@/stores/workflows/registry/store' +import { useFolderMap } from '@/hooks/queries/folders' +import { useWorkflows } from '@/hooks/queries/workflows' function truncateFilterValue(field: string, value: string): string { if ((field === 'executionId' || field === 'workflowId') && value.length > 12) { @@ -42,16 +43,17 @@ export function AutocompleteSearch({ className, onOpenChange, }: AutocompleteSearchProps) { - const workflows = useWorkflowRegistry((state) => state.workflows) - const folders = useFolderStore((state) => state.folders) + const { workspaceId } = useParams<{ workspaceId: string }>() + const { data: workflowList = [] } = useWorkflows(workspaceId) + const { data: folders = {} } = useFolderMap(workspaceId) const workflowsData = useMemo(() => { - return Object.values(workflows).map((w) => ({ + return workflowList.map((w) => ({ id: w.id, name: w.name, description: w.description, })) - }, [workflows]) + }, [workflowList]) const foldersData = useMemo(() => { return Object.values(folders).map((f) => ({ @@ -103,6 +105,7 @@ export function AutocompleteSearch({ } = useSearchState({ onFiltersChange: handleFiltersChange, getSuggestions: (input) => suggestionEngine.getSuggestions(input), + initialQuery: value, }) const lastExternalValue = useRef(value) @@ -114,14 +117,6 @@ export function AutocompleteSearch({ } }, [value, initializeFromQuery]) - useEffect(() => { - if (value) { - const parsed = parseQuery(value) - initializeFromQuery(parsed.textSearch, parsed.filters) - } - // eslint-disable-next-line react-hooks/exhaustive-deps - }, []) - const [dropdownWidth, setDropdownWidth] = useState(400) useEffect(() => { const measure = () => { diff --git a/apps/sim/app/workspace/[workspaceId]/logs/components/logs-toolbar/logs-toolbar.tsx b/apps/sim/app/workspace/[workspaceId]/logs/components/logs-toolbar/logs-toolbar.tsx 
index 542a64ca0ed..04926c08665 100644 --- a/apps/sim/app/workspace/[workspaceId]/logs/components/logs-toolbar/logs-toolbar.tsx +++ b/apps/sim/app/workspace/[workspaceId]/logs/components/logs-toolbar/logs-toolbar.tsx @@ -20,10 +20,10 @@ import { hasActiveFilters } from '@/lib/logs/filters' import { getTriggerOptions } from '@/lib/logs/get-trigger-options' import { type LogStatus, STATUS_CONFIG } from '@/app/workspace/[workspaceId]/logs/utils' import { getBlock } from '@/blocks/registry' -import { useFolderStore } from '@/stores/folders/store' +import { useFolderMap } from '@/hooks/queries/folders' +import { useWorkflows } from '@/hooks/queries/workflows' import { useFilterStore } from '@/stores/logs/filters/store' import { CORE_TRIGGER_TYPES } from '@/stores/logs/filters/types' -import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import { AutocompleteSearch } from './components/search' const TIME_RANGE_OPTIONS: ComboboxOption[] = [ @@ -218,17 +218,17 @@ export const LogsToolbar = memo(function LogsToolbar({ const [datePickerOpen, setDatePickerOpen] = useState(false) const [previousTimeRange, setPreviousTimeRange] = useState(timeRange) - const folders = useFolderStore((state) => state.folders) + const { data: folders = {} } = useFolderMap(workspaceId) - const allWorkflows = useWorkflowRegistry((state) => state.workflows) + const { data: allWorkflowList = [] } = useWorkflows(workspaceId) const workflows = useMemo(() => { - return Object.values(allWorkflows).map((w) => ({ + return allWorkflowList.map((w) => ({ id: w.id, name: w.name, color: w.color, })) - }, [allWorkflows]) + }, [allWorkflowList]) const folderList = useMemo(() => { return Object.values(folders).filter((f) => f.workspaceId === workspaceId) diff --git a/apps/sim/app/workspace/[workspaceId]/logs/hooks/use-search-state.ts b/apps/sim/app/workspace/[workspaceId]/logs/hooks/use-search-state.ts index 1e4647dec56..19af1dd346f 100644 --- 
a/apps/sim/app/workspace/[workspaceId]/logs/hooks/use-search-state.ts +++ b/apps/sim/app/workspace/[workspaceId]/logs/hooks/use-search-state.ts @@ -1,5 +1,6 @@ import { useCallback, useRef, useState } from 'react' import type { ParsedFilter } from '@/lib/logs/query-parser' +import { parseQuery } from '@/lib/logs/query-parser' import type { Suggestion, SuggestionGroup, @@ -10,16 +11,21 @@ interface UseSearchStateOptions { onFiltersChange: (filters: ParsedFilter[], textSearch: string) => void getSuggestions: (input: string) => SuggestionGroup | null debounceMs?: number + initialQuery?: string } export function useSearchState({ onFiltersChange, getSuggestions, debounceMs = 100, + initialQuery, }: UseSearchStateOptions) { - const [appliedFilters, setAppliedFilters] = useState([]) + const [initialParsed] = useState(() => + initialQuery ? parseQuery(initialQuery) : { filters: [] as ParsedFilter[], textSearch: '' } + ) + const [appliedFilters, setAppliedFilters] = useState(initialParsed.filters) const [currentInput, setCurrentInput] = useState('') - const [textSearch, setTextSearch] = useState('') + const [textSearch, setTextSearch] = useState(initialParsed.textSearch) const [isOpen, setIsOpen] = useState(false) const [suggestions, setSuggestions] = useState([]) @@ -84,7 +90,7 @@ export function useSearchState({ } const newFilter: ParsedFilter = { - field: suggestion.value.split(':')[0] as any, + field: suggestion.value.split(':')[0], operator: '=', value: suggestion.value.includes(':') ? 
suggestion.value.split(':').slice(1).join(':').replace(/"/g, '') diff --git a/apps/sim/app/workspace/[workspaceId]/logs/logs.tsx b/apps/sim/app/workspace/[workspaceId]/logs/logs.tsx index 098f23158a8..a939a859c1b 100644 --- a/apps/sim/app/workspace/[workspaceId]/logs/logs.tsx +++ b/apps/sim/app/workspace/[workspaceId]/logs/logs.tsx @@ -50,19 +50,18 @@ import { useSearchState } from '@/app/workspace/[workspaceId]/logs/hooks/use-sea import type { Suggestion } from '@/app/workspace/[workspaceId]/logs/types' import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider' import { getBlock } from '@/blocks/registry' -import { useFolders } from '@/hooks/queries/folders' +import { useFolderMap, useFolders } from '@/hooks/queries/folders' import { prefetchLogDetail, useDashboardStats, useLogDetail, useLogsList, } from '@/hooks/queries/logs' +import { useWorkflowMap, useWorkflows } from '@/hooks/queries/workflows' import { useDebounce } from '@/hooks/use-debounce' -import { useFolderStore } from '@/stores/folders/store' import { useFilterStore } from '@/stores/logs/filters/store' import type { WorkflowLog } from '@/stores/logs/filters/types' import { CORE_TRIGGER_TYPES } from '@/stores/logs/filters/types' -import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import { Dashboard, ExecutionSnapshot, @@ -266,19 +265,20 @@ export default function Logs() { isSidebarOpen: false, }) const isInitialized = useRef(false) - const pendingExecutionIdRef = useRef(null) + const pendingExecutionIdRef = useRef(undefined) + if (pendingExecutionIdRef.current === undefined) { + pendingExecutionIdRef.current = + typeof window !== 'undefined' + ? 
new URLSearchParams(window.location.search).get('executionId') + : null + } - const [searchQuery, setSearchQuery] = useState('') + const [searchQuery, setSearchQuery] = useState(() => { + if (typeof window === 'undefined') return '' + return new URLSearchParams(window.location.search).get('search') ?? '' + }) const debouncedSearchQuery = useDebounce(searchQuery, 300) - useEffect(() => { - const params = new URLSearchParams(window.location.search) - const urlSearch = params.get('search') - if (urlSearch) setSearchQuery(urlSearch) - const urlExecutionId = params.get('executionId') - if (urlExecutionId) pendingExecutionIdRef.current = urlExecutionId - }, []) - const isLive = true const [isVisuallyRefreshing, setIsVisuallyRefreshing] = useState(false) const [isExporting, setIsExporting] = useState(false) @@ -783,8 +783,8 @@ export default function Logs() { ] ) - const allWorkflows = useWorkflowRegistry((state) => state.workflows) - const folders = useFolderStore((state) => state.folders) + const { data: allWorkflows = {} } = useWorkflowMap(workspaceId) + const { data: folders = {} } = useFolderMap(workspaceId) const filterTags = useMemo(() => { const tags: FilterTag[] = [] @@ -1243,12 +1243,12 @@ function LogsFilterPanel({ searchQuery, onSearchQueryChange }: LogsFilterPanelPr const [datePickerOpen, setDatePickerOpen] = useState(false) const [previousTimeRange, setPreviousTimeRange] = useState(timeRange) - const folders = useFolderStore((state) => state.folders) - const allWorkflows = useWorkflowRegistry((state) => state.workflows) + const { data: folders = {} } = useFolderMap(workspaceId) + const { data: allWorkflowList = [] } = useWorkflows(workspaceId) const workflows = useMemo( - () => Object.values(allWorkflows).map((w) => ({ id: w.id, name: w.name, color: w.color })), - [allWorkflows] + () => allWorkflowList.map((w) => ({ id: w.id, name: w.name, color: w.color })), + [allWorkflowList] ) const folderList = useMemo( diff --git 
a/apps/sim/app/workspace/[workspaceId]/providers/provider-models-loader.tsx b/apps/sim/app/workspace/[workspaceId]/providers/provider-models-loader.tsx index 06344ae7592..f83d9e63bb0 100644 --- a/apps/sim/app/workspace/[workspaceId]/providers/provider-models-loader.tsx +++ b/apps/sim/app/workspace/[workspaceId]/providers/provider-models-loader.tsx @@ -2,8 +2,10 @@ import { useEffect } from 'react' import { createLogger } from '@sim/logger' +import { useParams } from 'next/navigation' import { useProviderModels } from '@/hooks/queries/providers' import { + updateFireworksProviderModels, updateOllamaProviderModels, updateOpenRouterProviderModels, updateVLLMProviderModels, @@ -12,11 +14,11 @@ import { type ProviderName, useProvidersStore } from '@/stores/providers' const logger = createLogger('ProviderModelsLoader') -function useSyncProvider(provider: ProviderName) { +function useSyncProvider(provider: ProviderName, workspaceId?: string) { const setProviderModels = useProvidersStore((state) => state.setProviderModels) const setProviderLoading = useProvidersStore((state) => state.setProviderLoading) const setOpenRouterModelInfo = useProvidersStore((state) => state.setOpenRouterModelInfo) - const { data, isLoading, isFetching, error } = useProviderModels(provider) + const { data, isLoading, isFetching, error } = useProviderModels(provider, workspaceId) useEffect(() => { setProviderLoading(provider, isLoading || isFetching) @@ -35,6 +37,8 @@ function useSyncProvider(provider: ProviderName) { if (data.modelInfo) { setOpenRouterModelInfo(data.modelInfo) } + } else if (provider === 'fireworks') { + void updateFireworksProviderModels(data.models) } } catch (syncError) { logger.warn(`Failed to sync provider definitions for ${provider}`, syncError as Error) @@ -51,9 +55,13 @@ function useSyncProvider(provider: ProviderName) { } export function ProviderModelsLoader() { + const params = useParams() + const workspaceId = params?.workspaceId as string | undefined + 
useSyncProvider('base') useSyncProvider('ollama') useSyncProvider('vllm') useSyncProvider('openrouter') + useSyncProvider('fireworks', workspaceId) return null } diff --git a/apps/sim/app/workspace/[workspaceId]/providers/workspace-scope-sync.tsx b/apps/sim/app/workspace/[workspaceId]/providers/workspace-scope-sync.tsx new file mode 100644 index 00000000000..6ffcb96facc --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/providers/workspace-scope-sync.tsx @@ -0,0 +1,24 @@ +'use client' + +import { useEffect } from 'react' +import { useParams } from 'next/navigation' +import { useWorkflowRegistry } from '@/stores/workflows/registry/store' + +/** + * Keeps workflow registry workspace scope synchronized with the current route. + */ +export function WorkspaceScopeSync() { + const { workspaceId } = useParams<{ workspaceId: string }>() + const hydrationWorkspaceId = useWorkflowRegistry((state) => state.hydration.workspaceId) + const switchToWorkspace = useWorkflowRegistry((state) => state.switchToWorkspace) + + useEffect(() => { + if (!workspaceId || hydrationWorkspaceId === workspaceId) { + return + } + + switchToWorkspace(workspaceId) + }, [hydrationWorkspaceId, switchToWorkspace, workspaceId]) + + return null +} diff --git a/apps/sim/app/workspace/[workspaceId]/scheduled-tasks/components/create-schedule-modal/schedule-modal.tsx b/apps/sim/app/workspace/[workspaceId]/scheduled-tasks/components/create-schedule-modal/schedule-modal.tsx index abcafb2974c..50806f02a0a 100644 --- a/apps/sim/app/workspace/[workspaceId]/scheduled-tasks/components/create-schedule-modal/schedule-modal.tsx +++ b/apps/sim/app/workspace/[workspaceId]/scheduled-tasks/components/create-schedule-modal/schedule-modal.tsx @@ -1,6 +1,6 @@ 'use client' -import { useEffect, useMemo, useState } from 'react' +import { useMemo, useState } from 'react' import { createLogger } from '@sim/logger' import { Button, @@ -151,49 +151,46 @@ function buildCronExpression( } } +/** + * Modal for creating and 
editing scheduled tasks. + * + * All `useState` initializers read from the `schedule` prop at mount time only. + * When editing an existing task, the call-site **must** supply a `key` prop equal to the + * task's ID so React remounts the component when the selected task changes — otherwise + * the form will display stale values from the previously selected task. + */ export function ScheduleModal({ open, onOpenChange, workspaceId, schedule }: ScheduleModalProps) { const createScheduleMutation = useCreateSchedule() const updateScheduleMutation = useUpdateSchedule() const isEditing = Boolean(schedule) - const [title, setTitle] = useState('') - const [prompt, setPrompt] = useState('') - const [scheduleType, setScheduleType] = useState('daily') - const [minutesInterval, setMinutesInterval] = useState('15') - const [hourlyMinute, setHourlyMinute] = useState('0') - const [dailyTime, setDailyTime] = useState('09:00') - const [weeklyDay, setWeeklyDay] = useState('MON') - const [weeklyDayTime, setWeeklyDayTime] = useState('09:00') - const [monthlyDay, setMonthlyDay] = useState('1') - const [monthlyTime, setMonthlyTime] = useState('09:00') - const [cronExpression, setCronExpression] = useState('') - const [timezone, setTimezone] = useState(DEFAULT_TIMEZONE) + const initialCronState = useMemo( + () => (schedule ? parseCronToScheduleType(schedule.cronExpression) : null), + [schedule] + ) + + const [title, setTitle] = useState(schedule?.jobTitle ?? '') + const [prompt, setPrompt] = useState(schedule?.prompt ?? '') + const [scheduleType, setScheduleType] = useState( + initialCronState?.scheduleType ?? 'daily' + ) + const [minutesInterval, setMinutesInterval] = useState(initialCronState?.minutesInterval ?? '15') + const [hourlyMinute, setHourlyMinute] = useState(initialCronState?.hourlyMinute ?? '0') + const [dailyTime, setDailyTime] = useState(initialCronState?.dailyTime ?? '09:00') + const [weeklyDay, setWeeklyDay] = useState(initialCronState?.weeklyDay ?? 
'MON') + const [weeklyDayTime, setWeeklyDayTime] = useState(initialCronState?.weeklyDayTime ?? '09:00') + const [monthlyDay, setMonthlyDay] = useState(initialCronState?.monthlyDay ?? '1') + const [monthlyTime, setMonthlyTime] = useState(initialCronState?.monthlyTime ?? '09:00') + const [cronExpression, setCronExpression] = useState(initialCronState?.cronExpression ?? '') + const [timezone, setTimezone] = useState(schedule?.timezone ?? DEFAULT_TIMEZONE) const [startDate, setStartDate] = useState('') - const [lifecycle, setLifecycle] = useState<'persistent' | 'until_complete'>('persistent') - const [maxRuns, setMaxRuns] = useState('') + const [lifecycle, setLifecycle] = useState<'persistent' | 'until_complete'>( + schedule?.lifecycle === 'until_complete' ? 'until_complete' : 'persistent' + ) + const [maxRuns, setMaxRuns] = useState(schedule?.maxRuns ? String(schedule.maxRuns) : '') const [submitError, setSubmitError] = useState(null) - useEffect(() => { - if (!open || !schedule) return - const cronState = parseCronToScheduleType(schedule.cronExpression) - setTitle(schedule.jobTitle || '') - setPrompt(schedule.prompt || '') - setScheduleType(cronState.scheduleType) - setMinutesInterval(cronState.minutesInterval) - setHourlyMinute(cronState.hourlyMinute) - setDailyTime(cronState.dailyTime) - setWeeklyDay(cronState.weeklyDay) - setWeeklyDayTime(cronState.weeklyDayTime) - setMonthlyDay(cronState.monthlyDay) - setMonthlyTime(cronState.monthlyTime) - setCronExpression(cronState.cronExpression) - setTimezone(schedule.timezone || DEFAULT_TIMEZONE) - setLifecycle(schedule.lifecycle === 'until_complete' ? 'until_complete' : 'persistent') - setMaxRuns(schedule.maxRuns ? 
String(schedule.maxRuns) : '') - setStartDate('') - }, [open, schedule]) - const computedCron = useMemo( () => buildCronExpression(scheduleType, { diff --git a/apps/sim/app/workspace/[workspaceId]/scheduled-tasks/page.tsx b/apps/sim/app/workspace/[workspaceId]/scheduled-tasks/page.tsx index 9135138d28e..1f0fadd8bfa 100644 --- a/apps/sim/app/workspace/[workspaceId]/scheduled-tasks/page.tsx +++ b/apps/sim/app/workspace/[workspaceId]/scheduled-tasks/page.tsx @@ -1,31 +1,8 @@ import type { Metadata } from 'next' -import { redirect } from 'next/navigation' -import { getSession } from '@/lib/auth' -import { verifyWorkspaceMembership } from '@/app/api/workflows/utils' import { ScheduledTasks } from './scheduled-tasks' export const metadata: Metadata = { title: 'Scheduled Tasks', } -interface ScheduledTasksPageProps { - params: Promise<{ - workspaceId: string - }> -} - -export default async function ScheduledTasksPage({ params }: ScheduledTasksPageProps) { - const { workspaceId } = await params - const session = await getSession() - - if (!session?.user?.id) { - redirect('/') - } - - const hasPermission = await verifyWorkspaceMembership(session.user.id, workspaceId) - if (!hasPermission) { - redirect('/') - } - - return -} +export default ScheduledTasks diff --git a/apps/sim/app/workspace/[workspaceId]/scheduled-tasks/scheduled-tasks.tsx b/apps/sim/app/workspace/[workspaceId]/scheduled-tasks/scheduled-tasks.tsx index 2fb6fb3e47c..69165d11798 100644 --- a/apps/sim/app/workspace/[workspaceId]/scheduled-tasks/scheduled-tasks.tsx +++ b/apps/sim/app/workspace/[workspaceId]/scheduled-tasks/scheduled-tasks.tsx @@ -433,6 +433,7 @@ export function ScheduledTasks() { /> { setIsEditModalOpen(open) diff --git a/apps/sim/app/workspace/[workspaceId]/settings/components/byok/byok.tsx b/apps/sim/app/workspace/[workspaceId]/settings/components/byok/byok.tsx index c6050f18800..315d80594c6 100644 --- a/apps/sim/app/workspace/[workspaceId]/settings/components/byok/byok.tsx +++ 
b/apps/sim/app/workspace/[workspaceId]/settings/components/byok/byok.tsx @@ -18,6 +18,7 @@ import { BrandfetchIcon, ExaAIIcon, FirecrawlIcon, + FireworksIcon, GeminiIcon, GoogleIcon, JinaAIIcon, @@ -75,6 +76,13 @@ const PROVIDERS: { description: 'LLM calls and Knowledge Base OCR', placeholder: 'Enter your API key', }, + { + id: 'fireworks', + name: 'Fireworks', + icon: FireworksIcon, + description: 'LLM calls', + placeholder: 'Enter your Fireworks API key', + }, { id: 'firecrawl', name: 'Firecrawl', diff --git a/apps/sim/app/workspace/[workspaceId]/settings/components/recently-deleted/recently-deleted.tsx b/apps/sim/app/workspace/[workspaceId]/settings/components/recently-deleted/recently-deleted.tsx index 8fdb3500bcc..c9b3f37a48c 100644 --- a/apps/sim/app/workspace/[workspaceId]/settings/components/recently-deleted/recently-deleted.tsx +++ b/apps/sim/app/workspace/[workspaceId]/settings/components/recently-deleted/recently-deleted.tsx @@ -119,7 +119,7 @@ export function RecentlyDeleted() { const [restoringIds, setRestoringIds] = useState>(new Set()) const [restoredItems, setRestoredItems] = useState>(new Map()) - const workflowsQuery = useWorkflows(workspaceId, { syncRegistry: false, scope: 'archived' }) + const workflowsQuery = useWorkflows(workspaceId, { scope: 'archived' }) const tablesQuery = useTablesList(workspaceId, 'archived') const knowledgeQuery = useKnowledgeBasesQuery(workspaceId, { scope: 'archived' }) const filesQuery = useWorkspaceFiles(workspaceId, 'archived') @@ -245,7 +245,10 @@ export function RecentlyDeleted() { switch (resource.type) { case 'workflow': - restoreWorkflow.mutate(resource.id, { onSettled, onSuccess }) + restoreWorkflow.mutate( + { workflowId: resource.id, workspaceId: resource.workspaceId }, + { onSettled, onSuccess } + ) break case 'table': restoreTable.mutate(resource.id, { onSettled, onSuccess }) diff --git a/apps/sim/app/workspace/[workspaceId]/tables/loading.tsx b/apps/sim/app/workspace/[workspaceId]/tables/loading.tsx index 
eda84842abf..a0c2aa9068b 100644 --- a/apps/sim/app/workspace/[workspaceId]/tables/loading.tsx +++ b/apps/sim/app/workspace/[workspaceId]/tables/loading.tsx @@ -27,9 +27,6 @@ export default function TablesLoading() { - {Array.from({ length: COLUMN_COUNT }).map((_, i) => ( {Array.from({ length: SKELETON_ROW_COUNT }).map((_, rowIndex) => ( - {Array.from({ length: COLUMN_COUNT }).map((_, colIndex) => (
- - @@ -40,9 +37,6 @@ export default function TablesLoading() {
- - -} - -export default async function TablesPage({ params }: TablesPageProps) { - const { workspaceId } = await params - const session = await getSession() - - if (!session?.user?.id) { - redirect('/') - } - - const hasPermission = await verifyWorkspaceMembership(session.user.id, workspaceId) - if (!hasPermission) { - redirect('/') - } - - const permissionConfig = await getUserPermissionConfig(session.user.id) - if (permissionConfig?.hideTablesTab) { - redirect(`/workspace/${workspaceId}`) - } - - return -} +export default Tables diff --git a/apps/sim/app/workspace/[workspaceId]/tables/tables.tsx b/apps/sim/app/workspace/[workspaceId]/tables/tables.tsx index 95ed0801d9f..a26aa3a18ed 100644 --- a/apps/sim/app/workspace/[workspaceId]/tables/tables.tsx +++ b/apps/sim/app/workspace/[workspaceId]/tables/tables.tsx @@ -1,6 +1,6 @@ 'use client' -import { useCallback, useMemo, useRef, useState } from 'react' +import { useCallback, useEffect, useMemo, useRef, useState } from 'react' import { createLogger } from '@sim/logger' import { useParams, useRouter } from 'next/navigation' import type { ComboboxOption } from '@/components/emcn' @@ -38,6 +38,7 @@ import { } from '@/hooks/queries/tables' import { useWorkspaceMembersQuery } from '@/hooks/queries/workspace' import { useDebounce } from '@/hooks/use-debounce' +import { usePermissionConfig } from '@/hooks/use-permission-config' const logger = createLogger('Tables') @@ -54,6 +55,14 @@ export function Tables() { const params = useParams() const router = useRouter() const workspaceId = params.workspaceId as string + + const { config: permissionConfig } = usePermissionConfig() + useEffect(() => { + if (permissionConfig.hideTablesTab) { + router.replace(`/workspace/${workspaceId}`) + } + }, [permissionConfig.hideTablesTab, router, workspaceId]) + const userPermissions = useUserPermissionsContext() const { data: tables = [], isLoading, error } = useTablesList(workspaceId) diff --git 
a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/chat/chat.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/chat/chat.tsx index 863db7529a0..505795fc5c1 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/chat/chat.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/chat/chat.tsx @@ -1143,13 +1143,15 @@ export function Chat() { {isStreaming ? ( ) : ( diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/hooks/use-drag-drop.ts b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/hooks/use-drag-drop.ts index 4e86d19749e..30dd068ef5f 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/hooks/use-drag-drop.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/hooks/use-drag-drop.ts @@ -1,17 +1,18 @@ import { useCallback, useEffect, useRef, useState } from 'react' import { createLogger } from '@sim/logger' import { useParams } from 'next/navigation' +import { getFolderPath } from '@/lib/folders/tree' import { useReorderFolders } from '@/hooks/queries/folders' +import { getFolderMap } from '@/hooks/queries/utils/folder-cache' +import { getWorkflows } from '@/hooks/queries/utils/workflow-cache' import { useReorderWorkflows } from '@/hooks/queries/workflows' import { useFolderStore } from '@/stores/folders/store' -import { useWorkflowRegistry } from '@/stores/workflows/registry/store' const logger = createLogger('WorkflowList:DragDrop') const SCROLL_THRESHOLD = 60 const SCROLL_SPEED = 8 const HOVER_EXPAND_DELAY = 400 -const DRAG_OVER_THROTTLE_MS = 16 export interface DropIndicator { targetId: string @@ -30,21 +31,35 @@ type SiblingItem = { createdAt: Date } +/** Root folder vs root workflow scope: API/cache may use null or undefined for "no parent". */ +function isSameFolderScope( + parentOrFolderId: string | null | undefined, + scope: string | null +): boolean { + return (parentOrFolderId ?? null) === (scope ?? 
null) +} + export function useDragDrop(options: UseDragDropOptions = {}) { const { disabled = false } = options const [dropIndicator, setDropIndicator] = useState(null) + /** + * Mirrors `dropIndicator` synchronously. `drop` can fire before React commits the last + * `dragOver` state update, so `handleDrop` must read this ref instead of state. + */ + const dropIndicatorRef = useRef(null) const [isDragging, setIsDragging] = useState(false) const [hoverFolderId, setHoverFolderId] = useState(null) const scrollContainerRef = useRef(null) const scrollAnimationRef = useRef(null) const hoverExpandTimerRef = useRef(null) const lastDragYRef = useRef(0) - const lastDragOverTimeRef = useRef(0) const draggedSourceFolderRef = useRef(null) const siblingsCacheRef = useRef>(new Map()) + const isDraggingRef = useRef(false) const params = useParams() const workspaceId = params.workspaceId as string | undefined + const reorderWorkflowsMutation = useReorderWorkflows() const reorderFoldersMutation = useReorderFolders() const setExpanded = useFolderStore((s) => s.setExpanded) @@ -125,6 +140,10 @@ export function useDragDrop(options: UseDragDropOptions = {}) { } }, [hoverFolderId, isDragging, expandedFolders, setExpanded]) + useEffect(() => { + siblingsCacheRef.current.clear() + }, [workspaceId]) + const calculateDropPosition = useCallback( (e: React.DragEvent, element: HTMLElement): 'before' | 'after' => { const rect = element.getBoundingClientRect() @@ -162,12 +181,28 @@ export function useDragDrop(options: UseDragDropOptions = {}) { : indicator.folderId }, []) - const calculateInsertIndex = useCallback( - (remaining: SiblingItem[], indicator: DropIndicator): number => { - return indicator.position === 'inside' - ? remaining.length - : remaining.findIndex((item) => item.id === indicator.targetId) + - (indicator.position === 'after' ? 1 : 0) + /** + * Insert index into the list of siblings **excluding** moving items. 
Must use the full + * `siblingItems` list for lookup: when the drop line targets the dragged row, + * `indicator.targetId` is not present in `remaining`, so indexing `remaining` alone + * returns -1 and corrupts the splice. + */ + const getInsertIndexInRemaining = useCallback( + (siblingItems: SiblingItem[], movingIds: Set, indicator: DropIndicator): number => { + if (indicator.position === 'inside') { + return siblingItems.filter((s) => !movingIds.has(s.id)).length + } + + const targetIdx = siblingItems.findIndex((s) => s.id === indicator.targetId) + if (targetIdx === -1) { + return siblingItems.filter((s) => !movingIds.has(s.id)).length + } + + if (indicator.position === 'before') { + return siblingItems.slice(0, targetIdx).filter((s) => !movingIds.has(s.id)).length + } + + return siblingItems.slice(0, targetIdx + 1).filter((s) => !movingIds.has(s.id)).length }, [] ) @@ -215,57 +250,65 @@ export function useDragDrop(options: UseDragDropOptions = {}) { lastDragYRef.current = e.clientY if (!isDragging) { + isDraggingRef.current = true setIsDragging(true) } - const now = performance.now() - if (now - lastDragOverTimeRef.current < DRAG_OVER_THROTTLE_MS) { - return false - } - lastDragOverTimeRef.current = now return true }, [isDragging] ) - const getSiblingItems = useCallback((folderId: string | null): SiblingItem[] => { - const cacheKey = folderId ?? 
'root' - const cached = siblingsCacheRef.current.get(cacheKey) - if (cached) return cached - - const currentFolders = useFolderStore.getState().folders - const currentWorkflows = useWorkflowRegistry.getState().workflows - const siblings = [ - ...Object.values(currentFolders) - .filter((f) => f.parentId === folderId) - .map((f) => ({ - type: 'folder' as const, - id: f.id, - sortOrder: f.sortOrder, - createdAt: f.createdAt, - })), - ...Object.values(currentWorkflows) - .filter((w) => w.folderId === folderId) - .map((w) => ({ - type: 'workflow' as const, - id: w.id, - sortOrder: w.sortOrder, - createdAt: w.createdAt, - })), - ].sort(compareSiblingItems) - - siblingsCacheRef.current.set(cacheKey, siblings) - return siblings - }, []) + const getSiblingItems = useCallback( + (folderId: string | null): SiblingItem[] => { + const cacheKey = folderId ?? 'root' + if (!isDraggingRef.current) { + const cached = siblingsCacheRef.current.get(cacheKey) + if (cached) return cached + } + + const currentFolders = workspaceId ? getFolderMap(workspaceId) : {} + const currentWorkflows = workspaceId ? 
getWorkflows(workspaceId) : [] + const siblings = [ + ...Object.values(currentFolders) + .filter((f) => isSameFolderScope(f.parentId, folderId)) + .map((f) => ({ + type: 'folder' as const, + id: f.id, + sortOrder: f.sortOrder, + createdAt: f.createdAt, + })), + ...currentWorkflows + .filter((w) => isSameFolderScope(w.folderId, folderId)) + .map((w) => ({ + type: 'workflow' as const, + id: w.id, + sortOrder: w.sortOrder, + createdAt: w.createdAt, + })), + ].sort(compareSiblingItems) + + if (!isDraggingRef.current) { + siblingsCacheRef.current.set(cacheKey, siblings) + } + return siblings + }, + [workspaceId] + ) const setNormalizedDropIndicator = useCallback( (indicator: DropIndicator | null) => { - setDropIndicator((prev) => { - let next: DropIndicator | null = indicator + if (indicator === null) { + dropIndicatorRef.current = null + setDropIndicator(null) + return + } - if (indicator && indicator.position === 'after' && indicator.targetId !== 'root') { - const siblings = getSiblingItems(indicator.folderId) - const currentIdx = siblings.findIndex((s) => s.id === indicator.targetId) + let next: DropIndicator = indicator + if (indicator.position === 'after' && indicator.targetId !== 'root') { + const siblings = getSiblingItems(indicator.folderId) + const currentIdx = siblings.findIndex((s) => s.id === indicator.targetId) + if (currentIdx !== -1) { const nextSibling = siblings[currentIdx + 1] if (nextSibling) { next = { @@ -275,15 +318,18 @@ export function useDragDrop(options: UseDragDropOptions = {}) { } } } + } + setDropIndicator((prev) => { if ( - prev?.targetId === next?.targetId && - prev?.position === next?.position && - prev?.folderId === next?.folderId + prev?.targetId === next.targetId && + prev?.position === next.position && + prev?.folderId === next.folderId ) { + dropIndicatorRef.current = prev return prev } - + dropIndicatorRef.current = next return next }) }, @@ -294,10 +340,11 @@ export function useDragDrop(options: UseDragDropOptions = {}) { 
(folderId: string, destinationFolderId: string | null): boolean => { if (folderId === destinationFolderId) return false if (!destinationFolderId) return true - const targetPath = useFolderStore.getState().getFolderPath(destinationFolderId) + if (!workspaceId) return false + const targetPath = getFolderPath(getFolderMap(workspaceId), destinationFolderId) return !targetPath.some((f) => f.id === folderId) }, - [] + [workspaceId] ) const collectMovingItems = useCallback( @@ -306,14 +353,14 @@ export function useDragDrop(options: UseDragDropOptions = {}) { folderIds: string[], destinationFolderId: string | null ): { fromDestination: SiblingItem[]; fromOther: SiblingItem[] } => { - const { folders } = useFolderStore.getState() - const { workflows } = useWorkflowRegistry.getState() + const folders = workspaceId ? getFolderMap(workspaceId) : {} + const workflows = workspaceId ? getWorkflows(workspaceId) : [] const fromDestination: SiblingItem[] = [] const fromOther: SiblingItem[] = [] for (const id of workflowIds) { - const workflow = workflows[id] + const workflow = workflows.find((w) => w.id === id) if (!workflow) continue const item: SiblingItem = { type: 'workflow', @@ -321,7 +368,7 @@ export function useDragDrop(options: UseDragDropOptions = {}) { sortOrder: workflow.sortOrder, createdAt: workflow.createdAt, } - if (workflow.folderId === destinationFolderId) { + if (isSameFolderScope(workflow.folderId, destinationFolderId)) { fromDestination.push(item) } else { fromOther.push(item) @@ -337,7 +384,7 @@ export function useDragDrop(options: UseDragDropOptions = {}) { sortOrder: folder.sortOrder, createdAt: folder.createdAt, } - if (folder.parentId === destinationFolderId) { + if (isSameFolderScope(folder.parentId, destinationFolderId)) { fromDestination.push(item) } else { fromOther.push(item) @@ -349,7 +396,7 @@ export function useDragDrop(options: UseDragDropOptions = {}) { return { fromDestination, fromOther } }, - [] + [workspaceId] ) const handleSelectionDrop = 
useCallback( @@ -362,7 +409,9 @@ export function useDragDrop(options: UseDragDropOptions = {}) { try { const destinationFolderId = getDestinationFolderId(indicator) const validFolderIds = folderIds.filter((id) => canMoveFolderTo(id, destinationFolderId)) - if (workflowIds.length === 0 && validFolderIds.length === 0) return + if (workflowIds.length === 0 && validFolderIds.length === 0) { + return + } const siblingItems = getSiblingItems(destinationFolderId) const movingIds = new Set([...workflowIds, ...validFolderIds]) @@ -374,7 +423,7 @@ export function useDragDrop(options: UseDragDropOptions = {}) { destinationFolderId ) - const insertAt = calculateInsertIndex(remaining, indicator) + const insertAt = getInsertIndexInRemaining(siblingItems, movingIds, indicator) const newOrder = [ ...remaining.slice(0, insertAt), ...fromDestination, @@ -397,7 +446,7 @@ export function useDragDrop(options: UseDragDropOptions = {}) { canMoveFolderTo, getSiblingItems, collectMovingItems, - calculateInsertIndex, + getInsertIndexInRemaining, buildAndSubmitUpdates, ] ) @@ -407,8 +456,10 @@ export function useDragDrop(options: UseDragDropOptions = {}) { e.preventDefault() e.stopPropagation() - const indicator = dropIndicator + const indicator = dropIndicatorRef.current + dropIndicatorRef.current = null setDropIndicator(null) + isDraggingRef.current = false setIsDragging(false) siblingsCacheRef.current.clear() @@ -427,7 +478,7 @@ export function useDragDrop(options: UseDragDropOptions = {}) { logger.error('Failed to handle drop:', error) } }, - [dropIndicator, handleSelectionDrop] + [handleSelectionDrop] ) const createWorkflowDragHandlers = useCallback( @@ -535,7 +586,9 @@ export function useDragDrop(options: UseDragDropOptions = {}) { onDragOver: (e: React.DragEvent) => { if (!initDragOver(e)) return if (itemId) { - setDropIndicator({ targetId: itemId, position, folderId: null }) + const edge: DropIndicator = { targetId: itemId, position, folderId: null } + dropIndicatorRef.current = edge 
+ setDropIndicator(edge) } else { setNormalizedDropIndicator({ targetId: 'root', position: 'inside', folderId: null }) } @@ -548,11 +601,15 @@ export function useDragDrop(options: UseDragDropOptions = {}) { const handleDragStart = useCallback((sourceFolderId: string | null) => { draggedSourceFolderRef.current = sourceFolderId + siblingsCacheRef.current.clear() + isDraggingRef.current = true setIsDragging(true) }, []) const handleDragEnd = useCallback(() => { + isDraggingRef.current = false setIsDragging(false) + dropIndicatorRef.current = null setDropIndicator(null) draggedSourceFolderRef.current = null setHoverFolderId(null) diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/hooks/use-workflow-operations.ts b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/hooks/use-workflow-operations.ts index e304f7d5975..054151a5a5b 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/hooks/use-workflow-operations.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/hooks/use-workflow-operations.ts @@ -1,11 +1,9 @@ import { useCallback, useMemo } from 'react' import { createLogger } from '@sim/logger' import { useRouter } from 'next/navigation' -import { useShallow } from 'zustand/react/shallow' import { getNextWorkflowColor } from '@/lib/workflows/colors' -import { useCreateWorkflow, useWorkflows } from '@/hooks/queries/workflows' +import { useCreateWorkflow, useWorkflowMap } from '@/hooks/queries/workflows' import { useWorkflowDiffStore } from '@/stores/workflow-diff/store' -import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import { generateCreativeWorkflowName } from '@/stores/workflows/registry/utils' const logger = createLogger('useWorkflowOperations') @@ -16,17 +14,14 @@ interface UseWorkflowOperationsProps { export function useWorkflowOperations({ workspaceId }: UseWorkflowOperationsProps) { const router = useRouter() - const workflows = useWorkflowRegistry(useShallow((state) => 
state.workflows)) - const workflowsQuery = useWorkflows(workspaceId) + const { data: workflows = {}, isLoading: workflowsLoading } = useWorkflowMap(workspaceId) const createWorkflowMutation = useCreateWorkflow() const regularWorkflows = useMemo( () => Object.values(workflows) .filter((workflow) => workflow.workspaceId === workspaceId) - .sort((a, b) => { - return b.createdAt.getTime() - a.createdAt.getTime() - }), + .sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime()), [workflows, workspaceId] ) @@ -59,7 +54,7 @@ export function useWorkflowOperations({ workspaceId }: UseWorkflowOperationsProp return { workflows, regularWorkflows, - workflowsLoading: workflowsQuery.isLoading, + workflowsLoading, isCreatingWorkflow: createWorkflowMutation.isPending, handleCreateWorkflow, diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/hooks/use-workspace-management.ts b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/hooks/use-workspace-management.ts index 7004584a5e9..47da0573a4f 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/hooks/use-workspace-management.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/hooks/use-workspace-management.ts @@ -110,7 +110,7 @@ export function useWorkspaceManagement({ } try { - await switchToWorkspace(workspace.id) + switchToWorkspace(workspace.id) routerRef.current?.push(`/workspace/${workspace.id}/home`) logger.info(`Switched to workspace: ${workspace.name} (${workspace.id})`) } catch (error) { diff --git a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/sidebar.tsx b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/sidebar.tsx index 7c89b20e191..d77d9c7483c 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/sidebar.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/components/sidebar/sidebar.tsx @@ -38,6 +38,7 @@ import { import { useSession } from '@/lib/auth/auth-client' import { cn } from '@/lib/core/utils/cn' import { 
isMacPlatform } from '@/lib/core/utils/platform' +import { buildFolderTree } from '@/lib/folders/tree' import { START_NAV_TOUR_EVENT, START_WORKFLOW_TOUR_EVENT, @@ -77,7 +78,7 @@ import { useImportWorkspace, } from '@/app/workspace/[workspaceId]/w/hooks' import { getBrandConfig } from '@/ee/whitelabeling' -import { useFolders } from '@/hooks/queries/folders' +import { useFolderMap, useFolders } from '@/hooks/queries/folders' import { useKnowledgeBasesQuery } from '@/hooks/queries/kb/knowledge' import { useTablesList } from '@/hooks/queries/tables' import { @@ -88,6 +89,7 @@ import { useRenameTask, useTasks, } from '@/hooks/queries/tasks' +import { useUpdateWorkflow } from '@/hooks/queries/workflows' import { useWorkspaceFiles } from '@/hooks/queries/workspace-files' import { usePermissionConfig } from '@/hooks/use-permission-config' import { useSettingsNavigation } from '@/hooks/use-settings-navigation' @@ -96,7 +98,6 @@ import { SIDEBAR_WIDTH } from '@/stores/constants' import { useFolderStore } from '@/stores/folders/store' import { useSearchModalStore } from '@/stores/modals/search/store' import { useSidebarStore } from '@/stores/sidebar/store' -import { useWorkflowRegistry } from '@/stores/workflows/registry/store' const logger = createLogger('Sidebar') @@ -203,7 +204,7 @@ const SidebarTaskItem = memo(function SidebarTaskItem({ onMoreClick(e, task.id) }} className={cn( - 'flex h-[18px] w-[18px] items-center justify-center rounded-sm opacity-0 group-hover:opacity-100', + 'flex h-[18px] w-[18px] items-center justify-center rounded-sm opacity-0 transition-opacity group-hover:opacity-100', isMenuOpen && 'opacity-100' )} > @@ -436,13 +437,12 @@ export const Sidebar = memo(function Sidebar() { }) useFolders(workspaceId) - const folders = useFolderStore((s) => s.folders) - const getFolderTree = useFolderStore((s) => s.getFolderTree) - const updateWorkflow = useWorkflowRegistry((state) => state.updateWorkflow) + const { data: folderMap = {} } = 
useFolderMap(workspaceId) + const updateWorkflowMutation = useUpdateWorkflow() const folderTree = useMemo( - () => (isCollapsed && workspaceId ? getFolderTree(workspaceId) : []), - [isCollapsed, workspaceId, folders, getFolderTree] + () => (isCollapsed && workspaceId ? buildFolderTree(folderMap, workspaceId) : []), + [isCollapsed, workspaceId, folderMap] ) const workflowsByFolder = useMemo( @@ -814,7 +814,11 @@ export const Sidebar = memo(function Sidebar() { const workflowFlyoutRename = useFlyoutInlineRename({ itemType: 'workflow', onSave: async (workflowIdToRename, name) => { - await updateWorkflow(workflowIdToRename, { name }) + await updateWorkflowMutation.mutateAsync({ + workspaceId, + workflowId: workflowIdToRename, + metadata: { name }, + }) }, }) @@ -1329,11 +1333,8 @@ export const Sidebar = memo(function Sidebar() { !hasOverflowTop && 'border-transparent' )} > -
-
+
+
All tasks
{!isCollapsed && (
@@ -1454,10 +1455,10 @@ export const Sidebar = memo(function Sidebar() {
-
+
Workflows
{!isCollapsed && (
diff --git a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-can-delete.ts b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-can-delete.ts index e109ca816cb..22c96afbb85 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-can-delete.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-can-delete.ts @@ -1,6 +1,6 @@ import { useCallback, useMemo } from 'react' -import { useFolderStore } from '@/stores/folders/store' -import { useWorkflowRegistry } from '@/stores/workflows/registry/store' +import { useFolderMap } from '@/hooks/queries/folders' +import { useWorkflows } from '@/hooks/queries/workflows' interface UseCanDeleteProps { /** @@ -36,17 +36,15 @@ interface UseCanDeleteReturn { * @returns Functions to check deletion eligibility */ export function useCanDelete({ workspaceId }: UseCanDeleteProps): UseCanDeleteReturn { - const workflows = useWorkflowRegistry((s) => s.workflows) - const folders = useFolderStore((s) => s.folders) + const { data: workflowList = [] } = useWorkflows(workspaceId) + const { data: folders = {} } = useFolderMap(workspaceId) /** * Pre-computed data structures for efficient lookups */ const { totalWorkflows, workflowIdSet, workflowsByFolderId, childFoldersByParentId } = useMemo(() => { - const workspaceWorkflows = Object.values(workflows).filter( - (w) => w.workspaceId === workspaceId - ) + const workspaceWorkflows = workflowList.filter((w) => w.workspaceId === workspaceId) const idSet = new Set(workspaceWorkflows.map((w) => w.id)) @@ -72,7 +70,7 @@ export function useCanDelete({ workspaceId }: UseCanDeleteProps): UseCanDeleteRe workflowsByFolderId: byFolderId, childFoldersByParentId: childrenByParent, } - }, [workflows, folders, workspaceId]) + }, [workflowList, folders, workspaceId]) /** * Count workflows in a folder and all its subfolders recursively. 
diff --git a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-delete-selection.ts b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-delete-selection.ts index b37cf32c322..bb22fb2fa01 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-delete-selection.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-delete-selection.ts @@ -2,8 +2,8 @@ import { useCallback, useState } from 'react' import { createLogger } from '@sim/logger' import { useRouter } from 'next/navigation' import { useDeleteFolderMutation } from '@/hooks/queries/folders' +import { useDeleteWorkflowMutation, useWorkflows } from '@/hooks/queries/workflows' import { useFolderStore } from '@/stores/folders/store' -import { useWorkflowRegistry } from '@/stores/workflows/registry/store' const logger = createLogger('useDeleteSelection') @@ -46,8 +46,8 @@ export function useDeleteSelection({ onSuccess, }: UseDeleteSelectionProps) { const router = useRouter() - const workflows = useWorkflowRegistry((s) => s.workflows) - const removeWorkflow = useWorkflowRegistry((s) => s.removeWorkflow) + const { data: workflowList = [] } = useWorkflows(workspaceId) + const deleteWorkflowMutation = useDeleteWorkflowMutation() const deleteFolderMutation = useDeleteFolderMutation() const [isDeleting, setIsDeleting] = useState(false) @@ -72,7 +72,7 @@ export function useDeleteSelection({ ? 
workflowIds.some((id) => isActiveWorkflow(id)) : false - const sidebarWorkflows = Object.values(workflows).filter((w) => w.workspaceId === workspaceId) + const sidebarWorkflows = workflowList.filter((w) => w.workspaceId === workspaceId) const workflowsInFolders = sidebarWorkflows .filter((w) => w.folderId && folderIds.includes(w.folderId)) @@ -128,7 +128,11 @@ export function useDeleteSelection({ } const standaloneWorkflowIds = workflowIds.filter((id) => !workflowsInFolders.includes(id)) - await Promise.all(standaloneWorkflowIds.map((id) => removeWorkflow(id))) + await Promise.all( + standaloneWorkflowIds.map((id) => + deleteWorkflowMutation.mutateAsync({ workspaceId, workflowId: id }) + ) + ) const { clearSelection, clearFolderSelection } = useFolderStore.getState() clearSelection() @@ -151,12 +155,10 @@ export function useDeleteSelection({ workflowIds, folderIds, isDeleting, - workflows, + workflowList, workspaceId, isActiveWorkflow, router, - removeWorkflow, - deleteFolderMutation, onSuccess, ]) diff --git a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-delete-workflow.ts b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-delete-workflow.ts index 37a56d24c0d..0e9c5d82cc5 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-delete-workflow.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-delete-workflow.ts @@ -1,10 +1,8 @@ import { useCallback, useState } from 'react' import { createLogger } from '@sim/logger' -import { useQueryClient } from '@tanstack/react-query' import { useRouter } from 'next/navigation' -import { workflowKeys } from '@/hooks/queries/workflows' +import { useDeleteWorkflowMutation, useWorkflows } from '@/hooks/queries/workflows' import { useFolderStore } from '@/stores/folders/store' -import { useWorkflowRegistry } from '@/stores/workflows/registry/store' const logger = createLogger('useDeleteWorkflow') @@ -41,9 +39,8 @@ export function useDeleteWorkflow({ onSuccess, }: UseDeleteWorkflowProps) { const router = useRouter() 
- const queryClient = useQueryClient() - const workflows = useWorkflowRegistry((s) => s.workflows) - const removeWorkflow = useWorkflowRegistry((s) => s.removeWorkflow) + const { data: workflowList = [] } = useWorkflows(workspaceId) + const deleteWorkflowMutation = useDeleteWorkflowMutation() const [isDeleting, setIsDeleting] = useState(false) /** @@ -65,7 +62,7 @@ export function useDeleteWorkflow({ const isActiveWorkflowBeingDeleted = typeof isActive === 'function' ? isActive(workflowIdsToDelete) : isActive - const sidebarWorkflows = Object.values(workflows).filter((w) => w.workspaceId === workspaceId) + const sidebarWorkflows = workflowList.filter((w) => w.workspaceId === workspaceId) let activeWorkflowId: string | null = null if (isActiveWorkflowBeingDeleted && typeof isActive === 'function') { @@ -105,8 +102,11 @@ export function useDeleteWorkflow({ } } - await Promise.all(workflowIdsToDelete.map((id) => removeWorkflow(id))) - await queryClient.invalidateQueries({ queryKey: workflowKeys.lists() }) + await Promise.all( + workflowIdsToDelete.map((id) => + deleteWorkflowMutation.mutateAsync({ workspaceId, workflowId: id }) + ) + ) const { clearSelection } = useFolderStore.getState() clearSelection() @@ -122,13 +122,12 @@ export function useDeleteWorkflow({ }, [ workflowIds, isDeleting, - workflows, + workflowList, workspaceId, isActive, router, - removeWorkflow, + deleteWorkflowMutation, onSuccess, - queryClient, ]) return { diff --git a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-duplicate-folder.ts b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-duplicate-folder.ts index 43b7399e4db..56851aed4f0 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-duplicate-folder.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-duplicate-folder.ts @@ -1,6 +1,8 @@ import { useCallback, useState } from 'react' import { createLogger } from '@sim/logger' +import { getChildFolders, getFolderById } from '@/lib/folders/tree' import { 
useDuplicateFolderMutation } from '@/hooks/queries/folders' +import { getFolderMap } from '@/hooks/queries/utils/folder-cache' import { useFolderStore } from '@/stores/folders/store' const logger = createLogger('useDuplicateFolder') @@ -54,10 +56,10 @@ export function useDuplicateFolder({ workspaceId, folderIds, onSuccess }: UseDup const folderIdsToDuplicate = Array.isArray(folderIds) ? folderIds : [folderIds] const duplicatedIds: string[] = [] - const folderStore = useFolderStore.getState() + const folderMap = getFolderMap(workspaceId) for (const folderId of folderIdsToDuplicate) { - const folder = folderStore.getFolderById(folderId) + const folder = getFolderById(folderMap, folderId) if (!folder) { logger.warn('Attempted to duplicate folder that no longer exists', { folderId }) @@ -65,7 +67,7 @@ export function useDuplicateFolder({ workspaceId, folderIds, onSuccess }: UseDup } const siblingNames = new Set( - folderStore.getChildFolders(folder.parentId).map((sibling) => sibling.name) + getChildFolders(folderMap, folder.parentId).map((sibling) => sibling.name) ) siblingNames.add(folder.name) diff --git a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-duplicate-selection.ts b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-duplicate-selection.ts index 48a48146e07..4ee23292533 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-duplicate-selection.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-duplicate-selection.ts @@ -1,11 +1,13 @@ import { useCallback, useRef, useState } from 'react' import { createLogger } from '@sim/logger' import { useRouter } from 'next/navigation' +import { getChildFolders, getFolderById } from '@/lib/folders/tree' import { getNextWorkflowColor } from '@/lib/workflows/colors' import { useDuplicateFolderMutation } from '@/hooks/queries/folders' +import { getFolderMap } from '@/hooks/queries/utils/folder-cache' +import { getWorkflows } from '@/hooks/queries/utils/workflow-cache' import { useDuplicateWorkflowMutation } 
from '@/hooks/queries/workflows' import { useFolderStore } from '@/stores/folders/store' -import { useWorkflowRegistry } from '@/stores/workflows/registry/store' const logger = createLogger('useDuplicateSelection') @@ -62,21 +64,21 @@ export function useDuplicateSelection({ workspaceId, onSuccess }: UseDuplicateSe setIsDuplicating(true) try { - const { workflows } = useWorkflowRegistry.getState() - const folderStore = useFolderStore.getState() + const workflowMap = new Map(getWorkflows(workspaceIdRef.current).map((w) => [w.id, w])) + const folderMap = getFolderMap(workspaceIdRef.current) const duplicatedWorkflowIds: string[] = [] const duplicatedFolderIds: string[] = [] for (const folderId of folderIds) { - const folder = folderStore.getFolderById(folderId) + const folder = getFolderById(folderMap, folderId) if (!folder) { logger.warn(`Folder ${folderId} not found, skipping`) continue } const siblingNames = new Set( - folderStore.getChildFolders(folder.parentId).map((sibling) => sibling.name) + getChildFolders(folderMap, folder.parentId).map((sibling) => sibling.name) ) siblingNames.add(folder.name) @@ -97,7 +99,7 @@ export function useDuplicateSelection({ workspaceId, onSuccess }: UseDuplicateSe } for (const workflowId of workflowIds) { - const workflow = workflows[workflowId] + const workflow = workflowMap.get(workflowId) if (!workflow) { logger.warn(`Workflow ${workflowId} not found, skipping`) continue diff --git a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-duplicate-workflow.ts b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-duplicate-workflow.ts index e1a14b49bfc..bffe42d6266 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-duplicate-workflow.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-duplicate-workflow.ts @@ -2,9 +2,9 @@ import { useCallback, useRef } from 'react' import { createLogger } from '@sim/logger' import { useRouter } from 'next/navigation' import { getNextWorkflowColor } from '@/lib/workflows/colors' +import { 
getWorkflows } from '@/hooks/queries/utils/workflow-cache' import { useDuplicateWorkflowMutation } from '@/hooks/queries/workflows' import { useFolderStore } from '@/stores/folders/store' -import { useWorkflowRegistry } from '@/stores/workflows/registry/store' const logger = createLogger('useDuplicateWorkflow') @@ -61,10 +61,10 @@ export function useDuplicateWorkflow({ workspaceId, onSuccess }: UseDuplicateWor const duplicatedIds: string[] = [] try { - const { workflows } = useWorkflowRegistry.getState() + const workflowMap = new Map(getWorkflows(workspaceIdRef.current).map((w) => [w.id, w])) for (const sourceId of workflowIdsToDuplicate) { - const sourceWorkflow = workflows[sourceId] + const sourceWorkflow = workflowMap.get(sourceId) if (!sourceWorkflow) { logger.warn(`Workflow ${sourceId} not found, skipping`) continue diff --git a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-export-folder.ts b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-export-folder.ts index e7a646b2f68..ff85f17e938 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-export-folder.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-export-folder.ts @@ -1,5 +1,7 @@ import { useCallback, useMemo, useState } from 'react' import { createLogger } from '@sim/logger' +import { useParams } from 'next/navigation' +import { getFolderById } from '@/lib/folders/tree' import { downloadFile, exportFolderToZip, @@ -8,9 +10,10 @@ import { sanitizePathSegment, type WorkflowExportData, } from '@/lib/workflows/operations/import-export' +import { useFolderMap } from '@/hooks/queries/folders' +import { useWorkflowMap } from '@/hooks/queries/workflows' import { useFolderStore } from '@/stores/folders/store' import type { WorkflowFolder } from '@/stores/folders/types' -import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import type { WorkflowMetadata } from '@/stores/workflows/registry/types' const logger = createLogger('useExportFolder') @@ -89,8 +92,9 @@ function 
collectSubfolders( * Hook for managing folder export to ZIP. */ export function useExportFolder({ folderId, onSuccess }: UseExportFolderProps) { - const workflows = useWorkflowRegistry((s) => s.workflows) - const folders = useFolderStore((s) => s.folders) + const { workspaceId } = useParams<{ workspaceId: string }>() + const { data: workflows = {} } = useWorkflowMap(workspaceId) + const { data: folders = {} } = useFolderMap(workspaceId) const [isExporting, setIsExporting] = useState(false) const hasWorkflows = useMemo(() => { @@ -105,22 +109,21 @@ export function useExportFolder({ folderId, onSuccess }: UseExportFolderProps) { setIsExporting(true) try { - const folderStore = useFolderStore.getState() - const folder = folderStore.getFolderById(folderId) + const folder = getFolderById(folders, folderId) if (!folder) { logger.warn('Folder not found for export', { folderId }) return } - const workflowsToExport = collectWorkflowsInFolder(folderId, workflows, folderStore.folders) + const workflowsToExport = collectWorkflowsInFolder(folderId, workflows, folders) if (workflowsToExport.length === 0) { logger.warn('No workflows found in folder to export', { folderId, folderName: folder.name }) return } - const subfolders = collectSubfolders(folderId, folderStore.folders) + const subfolders = collectSubfolders(folderId, folders) logger.info('Starting folder export', { folderId, diff --git a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-export-selection.ts b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-export-selection.ts index 92502caf55f..deed7a4e1dc 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-export-selection.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-export-selection.ts @@ -1,5 +1,6 @@ import { useCallback, useRef, useState } from 'react' import { createLogger } from '@sim/logger' +import { useParams } from 'next/navigation' import { downloadFile, exportWorkflowsToZip, @@ -7,9 +8,10 @@ import { fetchWorkflowForExport, type 
WorkflowExportData, } from '@/lib/workflows/operations/import-export' +import { getFolderMap } from '@/hooks/queries/utils/folder-cache' +import { getWorkflows } from '@/hooks/queries/utils/workflow-cache' import { useFolderStore } from '@/stores/folders/store' import type { WorkflowFolder } from '@/stores/folders/types' -import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import type { WorkflowMetadata } from '@/stores/workflows/registry/types' const logger = createLogger('useExportSelection') @@ -88,10 +90,15 @@ function collectSubfoldersForMultipleFolders( */ export function useExportSelection({ onSuccess }: UseExportSelectionProps = {}) { const [isExporting, setIsExporting] = useState(false) + const params = useParams() + const workspaceId = params.workspaceId as string | undefined const onSuccessRef = useRef(onSuccess) onSuccessRef.current = onSuccess + const workspaceIdRef = useRef(workspaceId) + workspaceIdRef.current = workspaceId + /** * Export all selected workflows and folders to a ZIP file. 
* - Collects workflows from selected folders recursively @@ -113,25 +120,29 @@ export function useExportSelection({ onSuccess }: UseExportSelectionProps = {}) setIsExporting(true) try { - const { workflows } = useWorkflowRegistry.getState() - const { folders } = useFolderStore.getState() + if (!workspaceIdRef.current) return + const workflowsArray = getWorkflows(workspaceIdRef.current) + const workflows = Object.fromEntries(workflowsArray.map((w) => [w.id, w])) + const folderMap = getFolderMap(workspaceIdRef.current) const workflowsFromFolders: CollectedWorkflow[] = [] for (const folderId of folderIds) { - const collected = collectWorkflowsInFolder(folderId, workflows, folders) + const collected = collectWorkflowsInFolder(folderId, workflows, folderMap) workflowsFromFolders.push(...collected) } - const subfolders = collectSubfoldersForMultipleFolders(folderIds, folders) - - const selectedFoldersData: FolderExportData[] = folderIds.map((folderId) => { - const folder = folders[folderId] - return { - id: folder.id, - name: folder.name, - parentId: null, - } - }) + const subfolders = collectSubfoldersForMultipleFolders(folderIds, folderMap) + + const selectedFoldersData: FolderExportData[] = folderIds + .filter((folderId) => folderMap[folderId]) + .map((folderId) => { + const folder = folderMap[folderId] + return { + id: folder.id, + name: folder.name, + parentId: null, + } + }) const allFolders = [...selectedFoldersData, ...subfolders] const workflowIdsFromFolders = workflowsFromFolders.map((w) => w.id) diff --git a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-export-workflow.ts b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-export-workflow.ts index afa812ed549..91faca98627 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-export-workflow.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-export-workflow.ts @@ -1,5 +1,6 @@ import { useCallback, useRef, useState } from 'react' import { createLogger } from '@sim/logger' +import { useParams } 
from 'next/navigation' import { downloadFile, exportWorkflowsToZip, @@ -7,8 +8,8 @@ import { fetchWorkflowForExport, sanitizePathSegment, } from '@/lib/workflows/operations/import-export' +import { getWorkflows } from '@/hooks/queries/utils/workflow-cache' import { useFolderStore } from '@/stores/folders/store' -import { useWorkflowRegistry } from '@/stores/workflows/registry/store' const logger = createLogger('useExportWorkflow') @@ -24,10 +25,15 @@ interface UseExportWorkflowProps { */ export function useExportWorkflow({ onSuccess }: UseExportWorkflowProps = {}) { const [isExporting, setIsExporting] = useState(false) + const params = useParams() + const workspaceId = params.workspaceId as string | undefined const onSuccessRef = useRef(onSuccess) onSuccessRef.current = onSuccess + const workspaceIdRef = useRef(workspaceId) + workspaceIdRef.current = workspaceId + /** * Export the workflow(s) to JSON or ZIP * - Single workflow: exports as JSON file @@ -52,11 +58,12 @@ export function useExportWorkflow({ onSuccess }: UseExportWorkflowProps = {}) { count: workflowIdsToExport.length, }) - const { workflows } = useWorkflowRegistry.getState() + if (!workspaceIdRef.current) return + const workflowMap = new Map(getWorkflows(workspaceIdRef.current).map((w) => [w.id, w])) const exportedWorkflows = [] for (const workflowId of workflowIdsToExport) { - const workflowMeta = workflows[workflowId] + const workflowMeta = workflowMap.get(workflowId) if (!workflowMeta) { logger.warn(`Workflow ${workflowId} not found in registry`) continue diff --git a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-import-workflow.ts b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-import-workflow.ts index c7f461b71f6..75ed0a4577f 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/hooks/use-import-workflow.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/hooks/use-import-workflow.ts @@ -8,8 +8,10 @@ import { persistImportedWorkflow, sanitizePathSegment, } from 
'@/lib/workflows/operations/import-export' -import { folderKeys, useCreateFolder } from '@/hooks/queries/folders' -import { useCreateWorkflow, workflowKeys } from '@/hooks/queries/workflows' +import { useCreateFolder } from '@/hooks/queries/folders' +import { folderKeys } from '@/hooks/queries/utils/folder-keys' +import { invalidateWorkflowLists } from '@/hooks/queries/utils/invalidate-workflow-lists' +import { useCreateWorkflow } from '@/hooks/queries/workflows' import { useWorkflowDiffStore } from '@/stores/workflow-diff/store' const logger = createLogger('useImportWorkflow') @@ -196,7 +198,7 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) { } } - await queryClient.invalidateQueries({ queryKey: workflowKeys.lists() }) + await invalidateWorkflowLists(queryClient, workspaceId) await queryClient.invalidateQueries({ queryKey: folderKeys.list(workspaceId) }) logger.info(`Import complete. Imported ${importedWorkflowIds.length} workflow(s)`) diff --git a/apps/sim/app/workspace/[workspaceId]/w/page.tsx b/apps/sim/app/workspace/[workspaceId]/w/page.tsx index e19bfd387e4..1276a785276 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/page.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/page.tsx @@ -12,47 +12,41 @@ const logger = createLogger('WorkflowsPage') export default function WorkflowsPage() { const router = useRouter() - const workflows = useWorkflowRegistry((s) => s.workflows) const setActiveWorkflow = useWorkflowRegistry((s) => s.setActiveWorkflow) const params = useParams() const workspaceId = params.workspaceId as string const [isMounted, setIsMounted] = useState(false) - // Fetch workflows using React Query - const { isLoading, isError } = useWorkflows(workspaceId) + const { data: workflows = [], isLoading, isError, isPlaceholderData } = useWorkflows(workspaceId) - // Track when component is mounted to avoid hydration issues useEffect(() => { setIsMounted(true) }, []) - // Handle redirection once workflows are loaded and 
component is mounted useEffect(() => { - // Wait for component to be mounted to avoid hydration mismatches if (!isMounted) return - - // Only proceed if workflows are done loading - if (isLoading) return + if (isLoading || isPlaceholderData) return if (isError) { logger.error('Failed to load workflows for workspace') return } - const workflowIds = Object.keys(workflows) - - // Validate that workflows belong to the current workspace - const workspaceWorkflows = workflowIds.filter((id) => { - const workflow = workflows[id] - return workflow.workspaceId === workspaceId - }) + const workspaceWorkflows = workflows.filter((w) => w.workspaceId === workspaceId) - // If we have valid workspace workflows, redirect to the first one if (workspaceWorkflows.length > 0) { - const firstWorkflowId = workspaceWorkflows[0] - router.replace(`/workspace/${workspaceId}/w/${firstWorkflowId}`) + router.replace(`/workspace/${workspaceId}/w/${workspaceWorkflows[0].id}`) } - }, [isMounted, isLoading, workflows, workspaceId, router, setActiveWorkflow, isError]) + }, [ + isMounted, + isLoading, + isPlaceholderData, + workflows, + workspaceId, + router, + setActiveWorkflow, + isError, + ]) // Always show loading state until redirect happens // There should always be a default workflow, so we never show "no workflows found" diff --git a/apps/sim/background/knowledge-processing.ts b/apps/sim/background/knowledge-processing.ts index 8f7d75c4284..5f20d5af285 100644 --- a/apps/sim/background/knowledge-processing.ts +++ b/apps/sim/background/knowledge-processing.ts @@ -15,11 +15,8 @@ export type DocumentProcessingPayload = { mimeType: string } processingOptions: { - chunkSize?: number - minCharactersPerChunk?: number recipe?: string lang?: string - chunkOverlap?: number } requestId: string } diff --git a/apps/sim/blocks/blocks/agent.ts b/apps/sim/blocks/blocks/agent.ts index a7a0829c882..b44ef4658c3 100644 --- a/apps/sim/blocks/blocks/agent.ts +++ b/apps/sim/blocks/blocks/agent.ts @@ -7,20 +7,30 @@ 
import { getApiKeyCondition, getModelOptions, RESPONSE_FORMAT_WAND_CONFIG } from import { getBaseModelProviders, getMaxTemperature, + getModelsWithDeepResearch, + getModelsWithoutMemory, + getModelsWithReasoningEffort, + getModelsWithThinking, + getModelsWithVerbosity, + getProviderModels, getReasoningEffortValuesForModel, getThinkingLevelsForModel, getVerbosityValuesForModel, - MODELS_WITH_DEEP_RESEARCH, - MODELS_WITH_REASONING_EFFORT, - MODELS_WITH_THINKING, - MODELS_WITH_VERBOSITY, - MODELS_WITHOUT_MEMORY, - providers, supportsTemperature, -} from '@/providers/utils' +} from '@/providers/models' +import { useWorkflowRegistry } from '@/stores/workflows/registry/store' +import { useSubBlockStore } from '@/stores/workflows/subblock/store' import type { ToolResponse } from '@/tools/types' const logger = createLogger('AgentBlock') +const VERTEX_MODELS = getProviderModels('vertex') +const BEDROCK_MODELS = getProviderModels('bedrock') +const AZURE_MODELS = [...getProviderModels('azure-openai'), ...getProviderModels('azure-anthropic')] +const MODELS_WITH_REASONING_EFFORT = getModelsWithReasoningEffort() +const MODELS_WITH_VERBOSITY = getModelsWithVerbosity() +const MODELS_WITH_THINKING = getModelsWithThinking() +const MODELS_WITH_DEEP_RESEARCH = getModelsWithDeepResearch() +const MODELS_WITHOUT_MEMORY = getModelsWithoutMemory() interface AgentResponse extends ToolResponse { output: { @@ -136,7 +146,7 @@ Return ONLY the JSON array.`, required: true, condition: { field: 'model', - value: providers.vertex.models, + value: VERTEX_MODELS, }, }, { @@ -149,7 +159,7 @@ Return ONLY the JSON array.`, required: true, condition: { field: 'model', - value: providers.vertex.models, + value: VERTEX_MODELS, }, }, { @@ -165,9 +175,6 @@ Return ONLY the JSON array.`, ], dependsOn: ['model'], fetchOptions: async (blockId: string) => { - const { useSubBlockStore } = await import('@/stores/workflows/subblock/store') - const { useWorkflowRegistry } = await 
import('@/stores/workflows/registry/store') - const autoOption = { label: 'auto', id: 'auto' } const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId @@ -224,9 +231,6 @@ Return ONLY the JSON array.`, ], dependsOn: ['model'], fetchOptions: async (blockId: string) => { - const { useSubBlockStore } = await import('@/stores/workflows/subblock/store') - const { useWorkflowRegistry } = await import('@/stores/workflows/registry/store') - const autoOption = { label: 'auto', id: 'auto' } const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId @@ -285,9 +289,6 @@ Return ONLY the JSON array.`, ], dependsOn: ['model'], fetchOptions: async (blockId: string) => { - const { useSubBlockStore } = await import('@/stores/workflows/subblock/store') - const { useWorkflowRegistry } = await import('@/stores/workflows/registry/store') - const noneOption = { label: 'none', id: 'none' } const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId @@ -326,7 +327,7 @@ Return ONLY the JSON array.`, connectionDroppable: false, condition: { field: 'model', - value: [...providers['azure-openai'].models, ...providers['azure-anthropic'].models], + value: AZURE_MODELS, }, }, { @@ -337,7 +338,7 @@ Return ONLY the JSON array.`, connectionDroppable: false, condition: { field: 'model', - value: [...providers['azure-openai'].models, ...providers['azure-anthropic'].models], + value: AZURE_MODELS, }, }, { @@ -349,7 +350,7 @@ Return ONLY the JSON array.`, required: true, condition: { field: 'model', - value: providers.vertex.models, + value: VERTEX_MODELS, }, }, { @@ -361,7 +362,7 @@ Return ONLY the JSON array.`, required: true, condition: { field: 'model', - value: providers.vertex.models, + value: VERTEX_MODELS, }, }, { @@ -374,7 +375,7 @@ Return ONLY the JSON array.`, required: true, condition: { field: 'model', - value: providers.bedrock.models, + value: BEDROCK_MODELS, }, }, { @@ -387,7 +388,7 @@ Return ONLY the JSON array.`, required: true, condition: { 
field: 'model', - value: providers.bedrock.models, + value: BEDROCK_MODELS, }, }, { @@ -398,7 +399,7 @@ Return ONLY the JSON array.`, connectionDroppable: false, condition: { field: 'model', - value: providers.bedrock.models, + value: BEDROCK_MODELS, }, }, { diff --git a/apps/sim/blocks/blocks/attio.ts b/apps/sim/blocks/blocks/attio.ts index 21ee5ffdf82..9a6af86f6a3 100644 --- a/apps/sim/blocks/blocks/attio.ts +++ b/apps/sim/blocks/blocks/attio.ts @@ -36,6 +36,7 @@ export const AttioBlock: BlockConfig = { { label: 'Create Note', id: 'create_note' }, { label: 'Delete Note', id: 'delete_note' }, { label: 'List Tasks', id: 'list_tasks' }, + { label: 'Get Task', id: 'get_task' }, { label: 'Create Task', id: 'create_task' }, { label: 'Update Task', id: 'update_task' }, { label: 'Delete Task', id: 'delete_task' }, @@ -490,8 +491,8 @@ Return ONLY the JSON array. No explanations, no markdown, no extra text. title: 'Task ID', type: 'short-input', placeholder: 'Enter the task ID', - condition: { field: 'operation', value: ['update_task', 'delete_task'] }, - required: { field: 'operation', value: ['update_task', 'delete_task'] }, + condition: { field: 'operation', value: ['get_task', 'update_task', 'delete_task'] }, + required: { field: 'operation', value: ['get_task', 'update_task', 'delete_task'] }, }, { id: 'taskFilterObject', @@ -944,7 +945,7 @@ YYYY-MM-DDTHH:mm:ss.SSSZ type: 'short-input', placeholder: 'https://example.com/webhook', condition: { field: 'operation', value: ['create_webhook', 'update_webhook'] }, - required: { field: 'operation', value: ['create_webhook', 'update_webhook'] }, + required: { field: 'operation', value: 'create_webhook' }, }, { id: 'webhookSubscriptions', @@ -952,7 +953,7 @@ YYYY-MM-DDTHH:mm:ss.SSSZ type: 'code', placeholder: '[{"event_type":"record.created","filter":{"object_id":"..."}}]', condition: { field: 'operation', value: ['create_webhook', 'update_webhook'] }, - required: { field: 'operation', value: ['create_webhook', 
'update_webhook'] }, + required: { field: 'operation', value: 'create_webhook' }, wandConfig: { enabled: true, maintainHistory: true, @@ -1040,6 +1041,10 @@ workspace-member.created ...getTrigger('attio_list_entry_created').subBlocks, ...getTrigger('attio_list_entry_updated').subBlocks, ...getTrigger('attio_list_entry_deleted').subBlocks, + ...getTrigger('attio_list_created').subBlocks, + ...getTrigger('attio_list_updated').subBlocks, + ...getTrigger('attio_list_deleted').subBlocks, + ...getTrigger('attio_workspace_member_created').subBlocks, ...getTrigger('attio_webhook').subBlocks, ], @@ -1063,6 +1068,10 @@ workspace-member.created 'attio_list_entry_created', 'attio_list_entry_updated', 'attio_list_entry_deleted', + 'attio_list_created', + 'attio_list_updated', + 'attio_list_deleted', + 'attio_workspace_member_created', 'attio_webhook', ], }, @@ -1081,6 +1090,7 @@ workspace-member.created 'attio_create_note', 'attio_delete_note', 'attio_list_tasks', + 'attio_get_task', 'attio_create_task', 'attio_update_task', 'attio_delete_task', diff --git a/apps/sim/blocks/blocks/evaluator.ts b/apps/sim/blocks/blocks/evaluator.ts index 4edb032ba9f..d3a78fa9574 100644 --- a/apps/sim/blocks/blocks/evaluator.ts +++ b/apps/sim/blocks/blocks/evaluator.ts @@ -6,8 +6,8 @@ import { getProviderCredentialSubBlocks, PROVIDER_CREDENTIAL_INPUTS, } from '@/blocks/utils' +import { getBaseModelProviders } from '@/providers/models' import type { ProviderId } from '@/providers/types' -import { getBaseModelProviders } from '@/providers/utils' import type { ToolResponse } from '@/tools/types' const logger = createLogger('EvaluatorBlock') diff --git a/apps/sim/blocks/blocks/extend.ts b/apps/sim/blocks/blocks/extend.ts new file mode 100644 index 00000000000..3d1572eeb8a --- /dev/null +++ b/apps/sim/blocks/blocks/extend.ts @@ -0,0 +1,199 @@ +import { ExtendIcon } from '@/components/icons' +import { AuthMode, type BlockConfig, IntegrationType, type SubBlockType } from '@/blocks/types' +import { 
createVersionedToolSelector, normalizeFileInput } from '@/blocks/utils' +import type { ExtendParserOutput } from '@/tools/extend/types' + +export const ExtendBlock: BlockConfig = { + type: 'extend', + name: 'Extend', + description: 'Parse and extract content from documents', + hideFromToolbar: true, + authMode: AuthMode.ApiKey, + longDescription: + 'Integrate Extend AI into the workflow. Parse and extract structured content from documents including PDFs, images, and Office files.', + docsLink: 'https://docs.sim.ai/tools/extend', + category: 'tools', + integrationType: IntegrationType.AI, + tags: ['document-processing', 'ocr'], + bgColor: '#000000', + icon: ExtendIcon, + subBlocks: [ + { + id: 'fileUpload', + title: 'Document', + type: 'file-upload' as SubBlockType, + canonicalParamId: 'document', + acceptedTypes: + 'application/pdf,image/jpeg,image/png,image/tiff,image/gif,image/bmp,image/webp,application/vnd.openxmlformats-officedocument.wordprocessingml.document,application/vnd.openxmlformats-officedocument.presentationml.presentation,application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', + placeholder: 'Upload a document', + mode: 'basic', + maxSize: 50, + required: true, + }, + { + id: 'filePath', + title: 'Document', + type: 'short-input' as SubBlockType, + canonicalParamId: 'document', + placeholder: 'Document URL', + mode: 'advanced', + required: true, + }, + { + id: 'outputFormat', + title: 'Output Format', + type: 'dropdown', + options: [ + { id: 'markdown', label: 'Markdown' }, + { id: 'spatial', label: 'Spatial' }, + ], + }, + { + id: 'chunking', + title: 'Chunking Strategy', + type: 'dropdown', + options: [ + { id: 'page', label: 'Page' }, + { id: 'document', label: 'Document' }, + { id: 'section', label: 'Section' }, + ], + }, + { + id: 'engine', + title: 'Engine', + type: 'dropdown', + mode: 'advanced', + options: [ + { id: 'parse_performance', label: 'Performance' }, + { id: 'parse_light', label: 'Light' }, + ], + }, + { + id: 'apiKey', 
+ title: 'API Key', + type: 'short-input' as SubBlockType, + placeholder: 'Enter your Extend API key', + password: true, + required: true, + }, + ], + tools: { + access: ['extend_parser'], + config: { + tool: () => 'extend_parser', + params: (params) => { + const parameters: Record = { + apiKey: params.apiKey.trim(), + } + + const documentInput = params.document + + if (typeof documentInput === 'object') { + parameters.file = documentInput + } else if (typeof documentInput === 'string') { + parameters.filePath = documentInput.trim() + } + + if (params.outputFormat) { + parameters.outputFormat = params.outputFormat + } + + if (params.chunking) { + parameters.chunking = params.chunking + } + + if (params.engine) { + parameters.engine = params.engine + } + + return parameters + }, + }, + }, + inputs: { + document: { + type: 'json', + description: 'Document input (canonical param for file upload or URL)', + }, + apiKey: { type: 'string', description: 'Extend API key' }, + outputFormat: { type: 'string', description: 'Output format (markdown or spatial)' }, + chunking: { type: 'string', description: 'Chunking strategy' }, + engine: { type: 'string', description: 'Parsing engine' }, + }, + outputs: { + id: { type: 'string', description: 'Unique identifier for the parser run' }, + status: { type: 'string', description: 'Processing status' }, + chunks: { type: 'json', description: 'Parsed document content chunks' }, + blocks: { type: 'json', description: 'Block-level document elements' }, + pageCount: { type: 'number', description: 'Number of pages processed' }, + creditsUsed: { type: 'number', description: 'API credits consumed' }, + }, +} + +const extendV2Inputs = ExtendBlock.inputs +const extendV2SubBlocks = (ExtendBlock.subBlocks || []).flatMap((subBlock) => { + if (subBlock.id === 'filePath') { + return [] + } + if (subBlock.id === 'fileUpload') { + return [ + subBlock, + { + id: 'fileReference', + title: 'Document', + type: 'short-input' as SubBlockType, + 
canonicalParamId: 'document', + placeholder: 'Connect a file output from another block', + mode: 'advanced' as const, + required: true, + }, + ] + } + return [subBlock] +}) + +export const ExtendV2Block: BlockConfig = { + ...ExtendBlock, + type: 'extend_v2', + name: 'Extend', + hideFromToolbar: false, + longDescription: + 'Integrate Extend AI into the workflow. Parse and extract structured content from documents or file references.', + subBlocks: extendV2SubBlocks, + tools: { + access: ['extend_parser_v2'], + config: { + tool: createVersionedToolSelector({ + baseToolSelector: () => 'extend_parser', + suffix: '_v2', + fallbackToolId: 'extend_parser_v2', + }), + params: (params) => { + const parameters: Record = { + apiKey: params.apiKey.trim(), + } + + const documentInput = normalizeFileInput(params.document, { single: true }) + if (!documentInput) { + throw new Error('Document file is required') + } + parameters.file = documentInput + + if (params.outputFormat) { + parameters.outputFormat = params.outputFormat + } + + if (params.chunking) { + parameters.chunking = params.chunking + } + + if (params.engine) { + parameters.engine = params.engine + } + + return parameters + }, + }, + }, + inputs: extendV2Inputs, +} diff --git a/apps/sim/blocks/blocks/launchdarkly.ts b/apps/sim/blocks/blocks/launchdarkly.ts new file mode 100644 index 00000000000..dbc78c214ad --- /dev/null +++ b/apps/sim/blocks/blocks/launchdarkly.ts @@ -0,0 +1,343 @@ +import { LaunchDarklyIcon } from '@/components/icons' +import type { BlockConfig } from '@/blocks/types' +import { AuthMode, IntegrationType } from '@/blocks/types' + +export const LaunchDarklyBlock: BlockConfig = { + type: 'launchdarkly', + name: 'LaunchDarkly', + description: 'Manage feature flags with LaunchDarkly.', + longDescription: + 'Integrate LaunchDarkly into your workflow. List, create, update, toggle, and delete feature flags. Manage projects, environments, segments, members, and audit logs. 
Requires API Key.', + docsLink: 'https://docs.sim.ai/tools/launchdarkly', + category: 'tools', + integrationType: IntegrationType.DeveloperTools, + tags: ['feature-flags', 'ci-cd'], + bgColor: '#191919', + icon: LaunchDarklyIcon, + authMode: AuthMode.ApiKey, + + subBlocks: [ + { + id: 'operation', + title: 'Operation', + type: 'dropdown', + options: [ + { label: 'List Flags', id: 'list_flags' }, + { label: 'Get Flag', id: 'get_flag' }, + { label: 'Create Flag', id: 'create_flag' }, + { label: 'Update Flag', id: 'update_flag' }, + { label: 'Toggle Flag', id: 'toggle_flag' }, + { label: 'Delete Flag', id: 'delete_flag' }, + { label: 'Get Flag Status', id: 'get_flag_status' }, + { label: 'List Projects', id: 'list_projects' }, + { label: 'List Environments', id: 'list_environments' }, + { label: 'List Segments', id: 'list_segments' }, + { label: 'List Members', id: 'list_members' }, + { label: 'Get Audit Log', id: 'get_audit_log' }, + ], + value: () => 'list_flags', + }, + { + id: 'apiKey', + title: 'API Key', + type: 'short-input', + placeholder: 'Enter your LaunchDarkly API key', + password: true, + required: true, + }, + + // Project key — needed for all except list_projects, list_members, get_audit_log + { + id: 'projectKey', + title: 'Project Key', + type: 'short-input', + placeholder: 'my-project', + condition: { + field: 'operation', + value: ['list_projects', 'list_members', 'get_audit_log'], + not: true, + }, + required: { + field: 'operation', + value: ['list_projects', 'list_members', 'get_audit_log'], + not: true, + }, + }, + + // Flag key — needed for get_flag, toggle_flag, delete_flag, update_flag, get_flag_status + { + id: 'flagKey', + title: 'Flag Key', + type: 'short-input', + placeholder: 'my-feature-flag', + condition: { + field: 'operation', + value: ['get_flag', 'toggle_flag', 'delete_flag', 'update_flag', 'get_flag_status'], + }, + required: { + field: 'operation', + value: ['get_flag', 'toggle_flag', 'delete_flag', 'update_flag', 
'get_flag_status'], + }, + }, + + // Environment key — optional for list_flags/get_flag, required for toggle_flag/get_flag_status/list_segments + { + id: 'environmentKey', + title: 'Environment Key', + type: 'short-input', + placeholder: 'production', + condition: { + field: 'operation', + value: ['list_flags', 'get_flag', 'toggle_flag', 'get_flag_status', 'list_segments'], + }, + required: { + field: 'operation', + value: ['toggle_flag', 'get_flag_status', 'list_segments'], + }, + }, + + // Enabled toggle — for toggle_flag only + { + id: 'enabled', + title: 'Enable Flag', + type: 'dropdown', + options: [ + { label: 'On', id: 'true' }, + { label: 'Off', id: 'false' }, + ], + value: () => 'true', + condition: { field: 'operation', value: 'toggle_flag' }, + }, + + // Create flag fields + { + id: 'flagName', + title: 'Flag Name', + type: 'short-input', + placeholder: 'My Feature Flag', + condition: { field: 'operation', value: 'create_flag' }, + required: { field: 'operation', value: 'create_flag' }, + }, + { + id: 'newFlagKey', + title: 'Flag Key', + type: 'short-input', + placeholder: 'my-feature-flag', + condition: { field: 'operation', value: 'create_flag' }, + required: { field: 'operation', value: 'create_flag' }, + }, + { + id: 'description', + title: 'Description', + type: 'long-input', + placeholder: 'Description of the feature flag', + condition: { field: 'operation', value: 'create_flag' }, + }, + { + id: 'tags', + title: 'Tags', + type: 'short-input', + placeholder: 'tag1, tag2', + condition: { field: 'operation', value: 'create_flag' }, + mode: 'advanced', + }, + { + id: 'temporary', + title: 'Temporary', + type: 'dropdown', + options: [ + { label: 'Yes', id: 'true' }, + { label: 'No', id: 'false' }, + ], + value: () => 'true', + condition: { field: 'operation', value: 'create_flag' }, + mode: 'advanced', + }, + + // Update flag fields + { + id: 'updateName', + title: 'New Name', + type: 'short-input', + placeholder: 'Updated flag name', + condition: { 
field: 'operation', value: 'update_flag' }, + }, + { + id: 'updateDescription', + title: 'New Description', + type: 'long-input', + placeholder: 'Updated description', + condition: { field: 'operation', value: 'update_flag' }, + }, + { + id: 'addTags', + title: 'Add Tags', + type: 'short-input', + placeholder: 'tag1, tag2', + condition: { field: 'operation', value: 'update_flag' }, + mode: 'advanced', + }, + { + id: 'removeTags', + title: 'Remove Tags', + type: 'short-input', + placeholder: 'old-tag1, old-tag2', + condition: { field: 'operation', value: 'update_flag' }, + mode: 'advanced', + }, + { + id: 'archive', + title: 'Archive/Restore', + type: 'dropdown', + options: [ + { label: 'No Change', id: '' }, + { label: 'Archive', id: 'true' }, + { label: 'Restore', id: 'false' }, + ], + value: () => '', + condition: { field: 'operation', value: 'update_flag' }, + mode: 'advanced', + }, + { + id: 'comment', + title: 'Comment', + type: 'short-input', + placeholder: 'Reason for update', + condition: { field: 'operation', value: 'update_flag' }, + mode: 'advanced', + }, + + // Audit log filter + { + id: 'spec', + title: 'Filter', + type: 'short-input', + placeholder: 'resourceType:flag', + condition: { field: 'operation', value: 'get_audit_log' }, + mode: 'advanced', + }, + + // Tag filter for list_flags + { + id: 'tag', + title: 'Filter by Tag', + type: 'short-input', + placeholder: 'tag-name', + condition: { field: 'operation', value: 'list_flags' }, + mode: 'advanced', + }, + + // Limit — for list operations and audit log + { + id: 'limit', + title: 'Limit', + type: 'short-input', + placeholder: '20', + condition: { + field: 'operation', + value: [ + 'list_flags', + 'list_projects', + 'list_environments', + 'list_segments', + 'list_members', + 'get_audit_log', + ], + }, + mode: 'advanced', + }, + ], + + tools: { + access: [ + 'launchdarkly_create_flag', + 'launchdarkly_delete_flag', + 'launchdarkly_get_audit_log', + 'launchdarkly_get_flag', + 
'launchdarkly_get_flag_status', + 'launchdarkly_list_environments', + 'launchdarkly_list_flags', + 'launchdarkly_list_members', + 'launchdarkly_list_projects', + 'launchdarkly_list_segments', + 'launchdarkly_toggle_flag', + 'launchdarkly_update_flag', + ], + config: { + tool: (params) => { + const operation = params.operation || 'list_flags' + return `launchdarkly_${operation}` + }, + params: (params) => { + const { operation, flagName, newFlagKey, ...rest } = params + + if (operation === 'create_flag') { + rest.name = flagName + rest.key = newFlagKey + } + + if (operation === 'toggle_flag') { + rest.enabled = rest.enabled === 'true' + } + + if (rest.temporary !== undefined) { + rest.temporary = rest.temporary === 'true' + } + + if (rest.archive !== undefined) { + if (rest.archive === 'true') rest.archive = true + else if (rest.archive === 'false') rest.archive = false + else rest.archive = undefined + } + + if (rest.limit) { + rest.limit = Number(rest.limit) + } + + return rest + }, + }, + }, + + inputs: { + operation: { type: 'string', description: 'Operation to perform' }, + apiKey: { type: 'string', description: 'LaunchDarkly API key' }, + projectKey: { type: 'string', description: 'Project key' }, + flagKey: { type: 'string', description: 'Feature flag key' }, + environmentKey: { type: 'string', description: 'Environment key' }, + enabled: { type: 'string', description: 'Whether to enable or disable the flag' }, + flagName: { type: 'string', description: 'Human-readable name for the flag' }, + newFlagKey: { type: 'string', description: 'Unique key for the new flag' }, + description: { type: 'string', description: 'Flag description' }, + tags: { type: 'string', description: 'Comma-separated tags' }, + temporary: { type: 'string', description: 'Whether the flag is temporary' }, + updateName: { type: 'string', description: 'New name for update operation' }, + updateDescription: { type: 'string', description: 'New description for update operation' }, + addTags: { 
type: 'string', description: 'Comma-separated tags to add' }, + removeTags: { type: 'string', description: 'Comma-separated tags to remove' }, + archive: { type: 'string', description: 'Archive or restore flag' }, + comment: { type: 'string', description: 'Comment for the update' }, + spec: { type: 'string', description: 'Audit log filter expression' }, + tag: { type: 'string', description: 'Filter flags by tag' }, + limit: { type: 'string', description: 'Maximum number of results' }, + }, + + outputs: { + flags: { type: 'json', description: 'List of feature flags' }, + totalCount: { type: 'number', description: 'Total number of results' }, + key: { type: 'string', description: 'Feature flag key' }, + name: { type: 'string', description: 'Feature flag or status name' }, + kind: { type: 'string', description: 'Flag type (boolean or multivariate)' }, + description: { type: 'string', description: 'Flag description' }, + temporary: { type: 'boolean', description: 'Whether the flag is temporary' }, + archived: { type: 'boolean', description: 'Whether the flag is archived' }, + on: { type: 'boolean', description: 'Whether the flag is on in the environment' }, + deleted: { type: 'boolean', description: 'Whether the flag was deleted' }, + projects: { type: 'json', description: 'List of projects' }, + environments: { type: 'json', description: 'List of environments' }, + segments: { type: 'json', description: 'List of segments' }, + members: { type: 'json', description: 'List of members' }, + entries: { type: 'json', description: 'List of audit log entries' }, + lastRequested: { type: 'string', description: 'Last time the flag was evaluated' }, + }, +} diff --git a/apps/sim/blocks/blocks/router.ts b/apps/sim/blocks/blocks/router.ts index 4fae2383ff4..c82c502c1c0 100644 --- a/apps/sim/blocks/blocks/router.ts +++ b/apps/sim/blocks/blocks/router.ts @@ -5,8 +5,8 @@ import { getProviderCredentialSubBlocks, PROVIDER_CREDENTIAL_INPUTS, } from '@/blocks/utils' +import { 
getBaseModelProviders } from '@/providers/models' import type { ProviderId } from '@/providers/types' -import { getBaseModelProviders } from '@/providers/utils' import type { ToolResponse } from '@/tools/types' interface RouterResponse extends ToolResponse { diff --git a/apps/sim/blocks/blocks/secrets_manager.ts b/apps/sim/blocks/blocks/secrets_manager.ts new file mode 100644 index 00000000000..a5c9a5bb16a --- /dev/null +++ b/apps/sim/blocks/blocks/secrets_manager.ts @@ -0,0 +1,282 @@ +import { SecretsManagerIcon } from '@/components/icons' +import type { BlockConfig } from '@/blocks/types' +import { IntegrationType } from '@/blocks/types' +import type { SecretsManagerBaseResponse } from '@/tools/secrets_manager/types' + +export const SecretsManagerBlock: BlockConfig = { + type: 'secrets_manager', + name: 'AWS Secrets Manager', + description: 'Connect to AWS Secrets Manager', + longDescription: + 'Integrate AWS Secrets Manager into the workflow. Can retrieve, create, update, list, and delete secrets.', + docsLink: 'https://docs.sim.ai/tools/secrets-manager', + category: 'tools', + integrationType: IntegrationType.DeveloperTools, + tags: ['cloud', 'secrets-management'], + bgColor: 'linear-gradient(45deg, #BD0816 0%, #FF5252 100%)', + icon: SecretsManagerIcon, + subBlocks: [ + { + id: 'operation', + title: 'Operation', + type: 'dropdown', + options: [ + { label: 'Get Secret', id: 'get_secret' }, + { label: 'List Secrets', id: 'list_secrets' }, + { label: 'Create Secret', id: 'create_secret' }, + { label: 'Update Secret', id: 'update_secret' }, + { label: 'Delete Secret', id: 'delete_secret' }, + ], + value: () => 'get_secret', + }, + { + id: 'region', + title: 'AWS Region', + type: 'short-input', + placeholder: 'us-east-1', + required: true, + }, + { + id: 'accessKeyId', + title: 'AWS Access Key ID', + type: 'short-input', + placeholder: 'AKIA...', + password: true, + required: true, + }, + { + id: 'secretAccessKey', + title: 'AWS Secret Access Key', + type: 
'short-input', + placeholder: 'Your secret access key', + password: true, + required: true, + }, + { + id: 'secretId', + title: 'Secret Name or ARN', + type: 'short-input', + placeholder: 'my-app/database-password', + condition: { field: 'operation', value: ['get_secret', 'update_secret', 'delete_secret'] }, + required: { field: 'operation', value: ['get_secret', 'update_secret', 'delete_secret'] }, + }, + { + id: 'name', + title: 'Secret Name', + type: 'short-input', + placeholder: 'my-app/database-password', + condition: { field: 'operation', value: 'create_secret' }, + required: { field: 'operation', value: 'create_secret' }, + }, + { + id: 'secretValue', + title: 'Secret Value', + type: 'code', + placeholder: '{"username":"admin","password":"secret123"}', + condition: { field: 'operation', value: ['create_secret', 'update_secret'] }, + required: { field: 'operation', value: ['create_secret', 'update_secret'] }, + }, + { + id: 'description', + title: 'Description', + type: 'short-input', + placeholder: 'Database credentials for production', + condition: { field: 'operation', value: ['create_secret', 'update_secret'] }, + required: false, + mode: 'advanced', + }, + { + id: 'versionId', + title: 'Version ID', + type: 'short-input', + placeholder: 'Version UUID (optional)', + condition: { field: 'operation', value: 'get_secret' }, + required: false, + mode: 'advanced', + }, + { + id: 'versionStage', + title: 'Version Stage', + type: 'short-input', + placeholder: 'AWSCURRENT', + condition: { field: 'operation', value: 'get_secret' }, + required: false, + mode: 'advanced', + }, + { + id: 'maxResults', + title: 'Max Results', + type: 'short-input', + placeholder: '100', + condition: { field: 'operation', value: 'list_secrets' }, + required: false, + mode: 'advanced', + }, + { + id: 'nextToken', + title: 'Next Token', + type: 'short-input', + placeholder: 'Pagination token', + condition: { field: 'operation', value: 'list_secrets' }, + required: false, + mode: 
'advanced', + }, + { + id: 'recoveryWindowInDays', + title: 'Recovery Window (Days)', + type: 'short-input', + placeholder: '30', + condition: { field: 'operation', value: 'delete_secret' }, + required: false, + mode: 'advanced', + }, + { + id: 'forceDelete', + title: 'Force Delete', + type: 'dropdown', + options: [ + { label: 'No', id: 'false' }, + { label: 'Yes', id: 'true' }, + ], + value: () => 'false', + condition: { field: 'operation', value: 'delete_secret' }, + required: false, + mode: 'advanced', + }, + ], + tools: { + access: [ + 'secrets_manager_get_secret', + 'secrets_manager_list_secrets', + 'secrets_manager_create_secret', + 'secrets_manager_update_secret', + 'secrets_manager_delete_secret', + ], + config: { + tool: (params) => { + switch (params.operation) { + case 'get_secret': + return 'secrets_manager_get_secret' + case 'list_secrets': + return 'secrets_manager_list_secrets' + case 'create_secret': + return 'secrets_manager_create_secret' + case 'update_secret': + return 'secrets_manager_update_secret' + case 'delete_secret': + return 'secrets_manager_delete_secret' + default: + throw new Error(`Invalid Secrets Manager operation: ${params.operation}`) + } + }, + params: (params) => { + const { operation, forceDelete, recoveryWindowInDays, maxResults, ...rest } = params + + const connectionConfig = { + region: rest.region, + accessKeyId: rest.accessKeyId, + secretAccessKey: rest.secretAccessKey, + } + + const result: Record = { ...connectionConfig } + + switch (operation) { + case 'get_secret': + result.secretId = rest.secretId + if (rest.versionId) result.versionId = rest.versionId + if (rest.versionStage) result.versionStage = rest.versionStage + break + case 'list_secrets': + if (maxResults) { + const parsed = Number.parseInt(String(maxResults), 10) + if (!Number.isNaN(parsed)) result.maxResults = parsed + } + if (rest.nextToken) result.nextToken = rest.nextToken + break + case 'create_secret': + result.name = rest.name + result.secretValue = 
rest.secretValue + if (rest.description) result.description = rest.description + break + case 'update_secret': + result.secretId = rest.secretId + result.secretValue = rest.secretValue + if (rest.description) result.description = rest.description + break + case 'delete_secret': + result.secretId = rest.secretId + if (recoveryWindowInDays) { + const parsed = Number.parseInt(String(recoveryWindowInDays), 10) + if (!Number.isNaN(parsed)) result.recoveryWindowInDays = parsed + } + if (forceDelete === 'true' || forceDelete === true) result.forceDelete = true + break + } + + return result + }, + }, + }, + inputs: { + operation: { type: 'string', description: 'Secrets Manager operation to perform' }, + region: { type: 'string', description: 'AWS region' }, + accessKeyId: { type: 'string', description: 'AWS access key ID' }, + secretAccessKey: { type: 'string', description: 'AWS secret access key' }, + secretId: { type: 'string', description: 'Secret name or ARN' }, + name: { type: 'string', description: 'Name for a new secret' }, + secretValue: { type: 'string', description: 'Secret value (plain text or JSON)' }, + description: { type: 'string', description: 'Secret description' }, + versionId: { type: 'string', description: 'Version ID' }, + versionStage: { type: 'string', description: 'Version stage (e.g., AWSCURRENT)' }, + maxResults: { type: 'number', description: 'Maximum number of results to return' }, + nextToken: { type: 'string', description: 'Pagination token' }, + recoveryWindowInDays: { type: 'number', description: 'Days before permanent deletion' }, + forceDelete: { type: 'string', description: 'Force immediate deletion' }, + }, + outputs: { + message: { + type: 'string', + description: 'Operation status message', + }, + name: { + type: 'string', + description: 'Name of the secret', + }, + secretValue: { + type: 'string', + description: 'The decrypted secret value', + }, + arn: { + type: 'string', + description: 'ARN of the secret', + }, + versionId: { + 
type: 'string', + description: 'Version ID of the secret', + }, + versionStages: { + type: 'array', + description: 'Staging labels attached to this version', + }, + secrets: { + type: 'json', + description: 'List of secrets', + }, + count: { + type: 'number', + description: 'Number of secrets returned', + }, + nextToken: { + type: 'string', + description: 'Pagination token for the next page', + }, + createdDate: { + type: 'string', + description: 'Date the secret was created', + }, + deletionDate: { + type: 'string', + description: 'Scheduled deletion date', + }, + }, +} diff --git a/apps/sim/blocks/blocks/tailscale.ts b/apps/sim/blocks/blocks/tailscale.ts new file mode 100644 index 00000000000..2a1f47dd0b8 --- /dev/null +++ b/apps/sim/blocks/blocks/tailscale.ts @@ -0,0 +1,341 @@ +import { TailscaleIcon } from '@/components/icons' +import type { BlockConfig } from '@/blocks/types' +import { AuthMode, IntegrationType } from '@/blocks/types' + +export const TailscaleBlock: BlockConfig = { + type: 'tailscale', + name: 'Tailscale', + description: 'Manage devices and network settings in your Tailscale tailnet', + longDescription: + 'Interact with the Tailscale API to manage devices, DNS, ACLs, auth keys, users, and routes across your tailnet.', + docsLink: 'https://docs.sim.ai/tools/tailscale', + category: 'tools', + integrationType: IntegrationType.Security, + tags: ['monitoring'], + bgColor: '#2E2D2D', + icon: TailscaleIcon, + authMode: AuthMode.ApiKey, + + subBlocks: [ + { + id: 'operation', + title: 'Operation', + type: 'dropdown', + options: [ + { label: 'List Devices', id: 'list_devices' }, + { label: 'Get Device', id: 'get_device' }, + { label: 'Delete Device', id: 'delete_device' }, + { label: 'Authorize Device', id: 'authorize_device' }, + { label: 'Set Device Tags', id: 'set_device_tags' }, + { label: 'Get Device Routes', id: 'get_device_routes' }, + { label: 'Set Device Routes', id: 'set_device_routes' }, + { label: 'Update Device Key', id: 
'update_device_key' }, + { label: 'List DNS Nameservers', id: 'list_dns_nameservers' }, + { label: 'Set DNS Nameservers', id: 'set_dns_nameservers' }, + { label: 'Get DNS Preferences', id: 'get_dns_preferences' }, + { label: 'Set DNS Preferences', id: 'set_dns_preferences' }, + { label: 'Get DNS Search Paths', id: 'get_dns_searchpaths' }, + { label: 'Set DNS Search Paths', id: 'set_dns_searchpaths' }, + { label: 'List Users', id: 'list_users' }, + { label: 'Create Auth Key', id: 'create_auth_key' }, + { label: 'List Auth Keys', id: 'list_auth_keys' }, + { label: 'Get Auth Key', id: 'get_auth_key' }, + { label: 'Delete Auth Key', id: 'delete_auth_key' }, + { label: 'Get ACL', id: 'get_acl' }, + ], + value: () => 'list_devices', + }, + { + id: 'apiKey', + title: 'API Key', + type: 'short-input', + password: true, + placeholder: 'tskey-api-...', + required: true, + }, + { + id: 'tailnet', + title: 'Tailnet', + type: 'short-input', + placeholder: 'example.com or "-" for default', + required: true, + }, + { + id: 'deviceId', + title: 'Device ID', + type: 'short-input', + placeholder: 'Enter device ID', + condition: { + field: 'operation', + value: [ + 'get_device', + 'delete_device', + 'authorize_device', + 'set_device_tags', + 'get_device_routes', + 'set_device_routes', + 'update_device_key', + ], + }, + required: { + field: 'operation', + value: [ + 'get_device', + 'delete_device', + 'authorize_device', + 'set_device_tags', + 'get_device_routes', + 'set_device_routes', + 'update_device_key', + ], + }, + }, + { + id: 'authorized', + title: 'Authorized', + type: 'dropdown', + options: [ + { label: 'Authorize', id: 'true' }, + { label: 'Deauthorize', id: 'false' }, + ], + value: () => 'true', + condition: { field: 'operation', value: 'authorize_device' }, + }, + { + id: 'keyExpiryDisabled', + title: 'Key Expiry Disabled', + type: 'dropdown', + options: [ + { label: 'Disable Expiry', id: 'true' }, + { label: 'Enable Expiry', id: 'false' }, + ], + value: () => 'true', + 
condition: { field: 'operation', value: 'update_device_key' }, + }, + { + id: 'tags', + title: 'Tags', + type: 'short-input', + placeholder: 'tag:server,tag:production', + condition: { field: 'operation', value: ['set_device_tags', 'create_auth_key'] }, + required: { field: 'operation', value: 'set_device_tags' }, + wandConfig: { + enabled: true, + prompt: + 'Generate a comma-separated list of Tailscale ACL tags. Each tag must start with "tag:" (e.g., tag:server,tag:production). Return ONLY the comma-separated tags - no explanations, no extra text.', + }, + }, + { + id: 'routes', + title: 'Routes', + type: 'short-input', + placeholder: '10.0.0.0/24,192.168.1.0/24', + condition: { field: 'operation', value: 'set_device_routes' }, + required: { field: 'operation', value: 'set_device_routes' }, + wandConfig: { + enabled: true, + prompt: + 'Generate a comma-separated list of subnet routes in CIDR notation (e.g., 10.0.0.0/24,192.168.1.0/24). Return ONLY the comma-separated routes - no explanations, no extra text.', + }, + }, + { + id: 'dnsServers', + title: 'DNS Nameservers', + type: 'short-input', + placeholder: '8.8.8.8,8.8.4.4', + condition: { field: 'operation', value: 'set_dns_nameservers' }, + required: { field: 'operation', value: 'set_dns_nameservers' }, + }, + { + id: 'magicDNS', + title: 'MagicDNS', + type: 'dropdown', + options: [ + { label: 'Enable', id: 'true' }, + { label: 'Disable', id: 'false' }, + ], + value: () => 'true', + condition: { field: 'operation', value: 'set_dns_preferences' }, + }, + { + id: 'searchPaths', + title: 'Search Paths', + type: 'short-input', + placeholder: 'corp.example.com,internal.example.com', + condition: { field: 'operation', value: 'set_dns_searchpaths' }, + required: { field: 'operation', value: 'set_dns_searchpaths' }, + }, + { + id: 'keyId', + title: 'Auth Key ID', + type: 'short-input', + placeholder: 'Enter auth key ID', + condition: { field: 'operation', value: ['get_auth_key', 'delete_auth_key'] }, + required: { 
field: 'operation', value: ['get_auth_key', 'delete_auth_key'] }, + }, + { + id: 'reusable', + title: 'Reusable', + type: 'dropdown', + options: [ + { label: 'No', id: 'false' }, + { label: 'Yes', id: 'true' }, + ], + value: () => 'false', + condition: { field: 'operation', value: 'create_auth_key' }, + mode: 'advanced', + }, + { + id: 'ephemeral', + title: 'Ephemeral', + type: 'dropdown', + options: [ + { label: 'No', id: 'false' }, + { label: 'Yes', id: 'true' }, + ], + value: () => 'false', + condition: { field: 'operation', value: 'create_auth_key' }, + mode: 'advanced', + }, + { + id: 'preauthorized', + title: 'Preauthorized', + type: 'dropdown', + options: [ + { label: 'Yes', id: 'true' }, + { label: 'No', id: 'false' }, + ], + value: () => 'true', + condition: { field: 'operation', value: 'create_auth_key' }, + mode: 'advanced', + }, + { + id: 'authKeyDescription', + title: 'Description', + type: 'short-input', + placeholder: 'Auth key description', + condition: { field: 'operation', value: 'create_auth_key' }, + mode: 'advanced', + }, + { + id: 'expirySeconds', + title: 'Expiry (seconds)', + type: 'short-input', + placeholder: '7776000 (90 days)', + condition: { field: 'operation', value: 'create_auth_key' }, + mode: 'advanced', + }, + ], + + tools: { + access: [ + 'tailscale_list_devices', + 'tailscale_get_device', + 'tailscale_delete_device', + 'tailscale_authorize_device', + 'tailscale_set_device_tags', + 'tailscale_get_device_routes', + 'tailscale_set_device_routes', + 'tailscale_update_device_key', + 'tailscale_list_dns_nameservers', + 'tailscale_set_dns_nameservers', + 'tailscale_get_dns_preferences', + 'tailscale_set_dns_preferences', + 'tailscale_get_dns_searchpaths', + 'tailscale_set_dns_searchpaths', + 'tailscale_list_users', + 'tailscale_create_auth_key', + 'tailscale_list_auth_keys', + 'tailscale_get_auth_key', + 'tailscale_delete_auth_key', + 'tailscale_get_acl', + ], + config: { + tool: (params) => `tailscale_${params.operation}`, + params: 
(params) => { + const mapped: Record = { + apiKey: params.apiKey, + tailnet: params.tailnet, + } + if (params.deviceId) mapped.deviceId = params.deviceId + if (params.keyId) mapped.keyId = params.keyId + if (params.tags) mapped.tags = params.tags + if (params.routes) mapped.routes = params.routes + if (params.dnsServers) mapped.dns = params.dnsServers + if (params.searchPaths) mapped.searchPaths = params.searchPaths + if (params.authorized !== undefined) mapped.authorized = params.authorized === 'true' + if (params.keyExpiryDisabled !== undefined) + mapped.keyExpiryDisabled = params.keyExpiryDisabled === 'true' + if (params.magicDNS !== undefined) mapped.magicDNS = params.magicDNS === 'true' + if (params.authKeyDescription) mapped.description = params.authKeyDescription + if (params.reusable !== undefined) mapped.reusable = params.reusable === 'true' + if (params.ephemeral !== undefined) mapped.ephemeral = params.ephemeral === 'true' + if (params.preauthorized !== undefined) + mapped.preauthorized = params.preauthorized === 'true' + if (params.expirySeconds) mapped.expirySeconds = Number(params.expirySeconds) + return mapped + }, + }, + }, + + inputs: { + apiKey: { type: 'string', description: 'Tailscale API key' }, + tailnet: { type: 'string', description: 'Tailnet name' }, + deviceId: { type: 'string', description: 'Device ID' }, + keyId: { type: 'string', description: 'Auth key ID' }, + authorized: { type: 'string', description: 'Authorization status' }, + keyExpiryDisabled: { type: 'string', description: 'Whether to disable key expiry' }, + tags: { type: 'string', description: 'Comma-separated tags' }, + routes: { type: 'string', description: 'Comma-separated subnet routes' }, + dnsServers: { type: 'string', description: 'Comma-separated DNS nameserver IPs' }, + magicDNS: { type: 'string', description: 'Enable or disable MagicDNS' }, + searchPaths: { type: 'string', description: 'Comma-separated DNS search path domains' }, + reusable: { type: 'string', 
description: 'Whether the auth key is reusable' }, + ephemeral: { type: 'string', description: 'Whether devices are ephemeral' }, + preauthorized: { type: 'string', description: 'Whether devices are pre-authorized' }, + authKeyDescription: { type: 'string', description: 'Auth key description' }, + expirySeconds: { type: 'string', description: 'Auth key expiry in seconds' }, + }, + + outputs: { + devices: { type: 'json', description: 'List of devices in the tailnet' }, + count: { type: 'number', description: 'Total count of items returned' }, + id: { type: 'string', description: 'Device or auth key ID' }, + name: { type: 'string', description: 'Device name' }, + hostname: { type: 'string', description: 'Device hostname' }, + user: { type: 'string', description: 'Associated user' }, + os: { type: 'string', description: 'Operating system' }, + clientVersion: { type: 'string', description: 'Tailscale client version' }, + addresses: { type: 'json', description: 'Tailscale IP addresses' }, + tags: { type: 'json', description: 'Device or auth key tags' }, + authorized: { type: 'boolean', description: 'Whether the device is authorized' }, + blocksIncomingConnections: { + type: 'boolean', + description: 'Whether the device blocks incoming connections', + }, + lastSeen: { type: 'string', description: 'Last seen timestamp' }, + created: { type: 'string', description: 'Creation timestamp' }, + enabledRoutes: { type: 'json', description: 'Enabled subnet routes' }, + advertisedRoutes: { type: 'json', description: 'Advertised subnet routes' }, + isExternal: { type: 'boolean', description: 'Whether the device is external' }, + updateAvailable: { type: 'boolean', description: 'Whether an update is available' }, + machineKey: { type: 'string', description: 'Machine key' }, + nodeKey: { type: 'string', description: 'Node key' }, + success: { type: 'boolean', description: 'Whether the operation succeeded' }, + deviceId: { type: 'string', description: 'Device ID' }, + 
keyExpiryDisabled: { type: 'boolean', description: 'Whether key expiry is disabled' }, + dns: { type: 'json', description: 'DNS nameserver addresses' }, + magicDNS: { type: 'boolean', description: 'Whether MagicDNS is enabled' }, + searchPaths: { type: 'json', description: 'DNS search paths' }, + users: { type: 'json', description: 'List of users in the tailnet' }, + keys: { type: 'json', description: 'List of auth keys' }, + key: { type: 'string', description: 'Auth key value (only at creation)' }, + keyId: { type: 'string', description: 'Auth key ID' }, + description: { type: 'string', description: 'Auth key description' }, + expires: { type: 'string', description: 'Expiration timestamp' }, + revoked: { type: 'string', description: 'Revocation timestamp' }, + capabilities: { type: 'json', description: 'Auth key capabilities' }, + acl: { type: 'string', description: 'ACL policy as JSON string' }, + etag: { type: 'string', description: 'ACL ETag for conditional updates' }, + }, +} diff --git a/apps/sim/blocks/registry.ts b/apps/sim/blocks/registry.ts index 1461cd58a60..574fdd000eb 100644 --- a/apps/sim/blocks/registry.ts +++ b/apps/sim/blocks/registry.ts @@ -42,6 +42,7 @@ import { EnrichBlock } from '@/blocks/blocks/enrich' import { EvaluatorBlock } from '@/blocks/blocks/evaluator' import { EvernoteBlock } from '@/blocks/blocks/evernote' import { ExaBlock } from '@/blocks/blocks/exa' +import { ExtendBlock, ExtendV2Block } from '@/blocks/blocks/extend' import { FathomBlock } from '@/blocks/blocks/fathom' import { FileBlock, FileV2Block, FileV3Block } from '@/blocks/blocks/file' import { FirecrawlBlock } from '@/blocks/blocks/firecrawl' @@ -95,6 +96,7 @@ import { KalshiBlock, KalshiV2Block } from '@/blocks/blocks/kalshi' import { KetchBlock } from '@/blocks/blocks/ketch' import { KnowledgeBlock } from '@/blocks/blocks/knowledge' import { LangsmithBlock } from '@/blocks/blocks/langsmith' +import { LaunchDarklyBlock } from '@/blocks/blocks/launchdarkly' import { 
LemlistBlock } from '@/blocks/blocks/lemlist' import { LinearBlock } from '@/blocks/blocks/linear' import { LinkedInBlock } from '@/blocks/blocks/linkedin' @@ -155,6 +157,7 @@ import { S3Block } from '@/blocks/blocks/s3' import { SalesforceBlock } from '@/blocks/blocks/salesforce' import { ScheduleBlock } from '@/blocks/blocks/schedule' import { SearchBlock } from '@/blocks/blocks/search' +import { SecretsManagerBlock } from '@/blocks/blocks/secrets_manager' import { SendGridBlock } from '@/blocks/blocks/sendgrid' import { SentryBlock } from '@/blocks/blocks/sentry' import { SerperBlock } from '@/blocks/blocks/serper' @@ -175,6 +178,7 @@ import { StripeBlock } from '@/blocks/blocks/stripe' import { SttBlock, SttV2Block } from '@/blocks/blocks/stt' import { SupabaseBlock } from '@/blocks/blocks/supabase' import { TableBlock } from '@/blocks/blocks/table' +import { TailscaleBlock } from '@/blocks/blocks/tailscale' import { TavilyBlock } from '@/blocks/blocks/tavily' import { TelegramBlock } from '@/blocks/blocks/telegram' import { TextractBlock, TextractV2Block } from '@/blocks/blocks/textract' @@ -254,9 +258,11 @@ export const registry: Record = { elevenlabs: ElevenLabsBlock, fathom: FathomBlock, enrich: EnrichBlock, - evernote: EvernoteBlock, evaluator: EvaluatorBlock, + evernote: EvernoteBlock, exa: ExaBlock, + extend: ExtendBlock, + extend_v2: ExtendV2Block, file: FileBlock, file_v2: FileV2Block, file_v3: FileV3Block, @@ -319,6 +325,7 @@ export const registry: Record = { ketch: KetchBlock, knowledge: KnowledgeBlock, langsmith: LangsmithBlock, + launchdarkly: LaunchDarklyBlock, lemlist: LemlistBlock, linear: LinearBlock, linkedin: LinkedInBlock, @@ -393,6 +400,7 @@ export const registry: Record = { slack: SlackBlock, smtp: SmtpBlock, spotify: SpotifyBlock, + secrets_manager: SecretsManagerBlock, sqs: SQSBlock, ssh: SSHBlock, stagehand: StagehandBlock, @@ -403,6 +411,7 @@ export const registry: Record = { stt_v2: SttV2Block, supabase: SupabaseBlock, table: 
TableBlock, + tailscale: TailscaleBlock, tavily: TavilyBlock, telegram: TelegramBlock, textract: TextractBlock, diff --git a/apps/sim/blocks/types.ts b/apps/sim/blocks/types.ts index cfed6eeb7bb..614c686ec54 100644 --- a/apps/sim/blocks/types.ts +++ b/apps/sim/blocks/types.ts @@ -89,6 +89,7 @@ export type IntegrationTag = | 'forms' | 'link-management' | 'events' + | 'feature-flags' // Authentication modes for sub-blocks and summaries export enum AuthMode { diff --git a/apps/sim/blocks/utils.ts b/apps/sim/blocks/utils.ts index 373538b50e3..e06dbdf5add 100644 --- a/apps/sim/blocks/utils.ts +++ b/apps/sim/blocks/utils.ts @@ -4,10 +4,14 @@ import { getHostedModels, getProviderFromModel, getProviderIcon, - providers, -} from '@/providers/utils' + getProviderModels, +} from '@/providers/models' import { useProvidersStore } from '@/stores/providers/store' +const VERTEX_MODELS = getProviderModels('vertex') +const BEDROCK_MODELS = getProviderModels('bedrock') +const AZURE_MODELS = [...getProviderModels('azure-openai'), ...getProviderModels('azure-anthropic')] + /** * Returns model options for combobox subblocks, combining all provider sources. 
*/ @@ -17,8 +21,15 @@ export function getModelOptions() { const ollamaModels = providersState.providers.ollama.models const vllmModels = providersState.providers.vllm.models const openrouterModels = providersState.providers.openrouter.models + const fireworksModels = providersState.providers.fireworks.models const allModels = Array.from( - new Set([...baseModels, ...ollamaModels, ...vllmModels, ...openrouterModels]) + new Set([ + ...baseModels, + ...ollamaModels, + ...vllmModels, + ...openrouterModels, + ...fireworksModels, + ]) ) return allModels.map((model) => { @@ -152,7 +163,7 @@ export function getProviderCredentialSubBlocks(): SubBlockConfig[] { required: true, condition: { field: 'model', - value: providers.vertex.models, + value: VERTEX_MODELS, }, }, { @@ -174,7 +185,7 @@ export function getProviderCredentialSubBlocks(): SubBlockConfig[] { connectionDroppable: false, condition: { field: 'model', - value: [...providers['azure-openai'].models, ...providers['azure-anthropic'].models], + value: AZURE_MODELS, }, }, { @@ -185,7 +196,7 @@ export function getProviderCredentialSubBlocks(): SubBlockConfig[] { connectionDroppable: false, condition: { field: 'model', - value: [...providers['azure-openai'].models, ...providers['azure-anthropic'].models], + value: AZURE_MODELS, }, }, { @@ -197,7 +208,7 @@ export function getProviderCredentialSubBlocks(): SubBlockConfig[] { required: true, condition: { field: 'model', - value: providers.vertex.models, + value: VERTEX_MODELS, }, }, { @@ -209,7 +220,7 @@ export function getProviderCredentialSubBlocks(): SubBlockConfig[] { required: true, condition: { field: 'model', - value: providers.vertex.models, + value: VERTEX_MODELS, }, }, { @@ -222,7 +233,7 @@ export function getProviderCredentialSubBlocks(): SubBlockConfig[] { required: true, condition: { field: 'model', - value: providers.bedrock.models, + value: BEDROCK_MODELS, }, }, { @@ -235,7 +246,7 @@ export function getProviderCredentialSubBlocks(): SubBlockConfig[] { 
required: true, condition: { field: 'model', - value: providers.bedrock.models, + value: BEDROCK_MODELS, }, }, { @@ -246,7 +257,7 @@ export function getProviderCredentialSubBlocks(): SubBlockConfig[] { connectionDroppable: false, condition: { field: 'model', - value: providers.bedrock.models, + value: BEDROCK_MODELS, }, }, ] diff --git a/apps/sim/components/icons.tsx b/apps/sim/components/icons.tsx index 6f53db86f8b..f89c7fddc07 100644 --- a/apps/sim/components/icons.tsx +++ b/apps/sim/components/icons.tsx @@ -683,6 +683,45 @@ export function SerperIcon(props: SVGProps) { ) } +export function TailscaleIcon(props: SVGProps) { + return ( + + + + + + + + + + ) +} + export function TavilyIcon(props: SVGProps) { return ( @@ -2041,6 +2080,19 @@ export function Mem0Icon(props: SVGProps) { ) } +export function ExtendIcon(props: SVGProps) { + return ( + + + + ) +} + export function EvernoteIcon(props: SVGProps) { return ( @@ -2152,6 +2204,17 @@ export function LangsmithIcon(props: SVGProps) { ) } +export function LaunchDarklyIcon(props: SVGProps) { + return ( + + + + ) +} + export function LemlistIcon(props: SVGProps) { return ( @@ -3420,6 +3483,25 @@ export function MySQLIcon(props: SVGProps) { ) } +export function FireworksIcon(props: SVGProps) { + return ( + + + + ) +} + export function OpenRouterIcon(props: SVGProps) { return ( ) { ) } +export function SecretsManagerIcon(props: SVGProps) { + return ( + + + + + + + + + + + ) +} + export function SQSIcon(props: SVGProps) { return ( t.params?.serverId).filter(Boolean))] if (serverIds.length === 0) return tools + if (!ctx.workspaceId) { + logger.warn('Skipping MCP availability filtering without workspace scope') + return tools + } + const availableServerIds = new Set() - if (ctx.workspaceId && serverIds.length > 0) { + if (serverIds.length > 0) { try { const servers = await db .select({ id: mcpServers.id, connectionStatus: mcpServers.connectionStatus }) @@ -245,8 +252,6 @@ export class AgentBlockHandler implements 
BlockHandler { return null } - const { filterSchemaForLLM } = await import('@/tools/params') - const filteredSchema = filterSchemaForLLM(schema.function.parameters, userProvidedParams) const toolId = `${AGENT.CUSTOM_TOOL_PREFIX}${title}` @@ -272,22 +277,6 @@ export class AgentBlockHandler implements BlockHandler { ctx: ExecutionContext, customToolId: string ): Promise<{ schema: any; title: string } | null> { - if (typeof window !== 'undefined') { - try { - const { getCustomTool } = await import('@/hooks/queries/custom-tools') - const tool = getCustomTool(customToolId, ctx.workspaceId) - if (tool) { - return { - schema: tool.schema, - title: tool.title, - } - } - logger.warn(`Custom tool not found in cache: ${customToolId}`) - } catch (error) { - logger.error('Error accessing custom tools cache:', { error }) - } - } - try { const headers = await buildAuthHeaders(ctx.userId) const params: Record = {} @@ -572,7 +561,12 @@ export class AgentBlockHandler implements BlockHandler { const transformedTool = await transformBlockTool(tool, { selectedOperation: tool.operation, getAllBlocks, - getToolAsync: (toolId: string) => getToolAsync(toolId, ctx.workflowId), + getToolAsync: (toolId: string) => + getToolAsync(toolId, { + workflowId: ctx.workflowId, + userId: ctx.userId, + workspaceId: ctx.workspaceId, + }), getTool, canonicalModes, }) diff --git a/apps/sim/executor/handlers/workflow/workflow-handler.ts b/apps/sim/executor/handlers/workflow/workflow-handler.ts index 50db926be7d..4e92948ee17 100644 --- a/apps/sim/executor/handlers/workflow/workflow-handler.ts +++ b/apps/sim/executor/handlers/workflow/workflow-handler.ts @@ -21,7 +21,6 @@ import { parseJSON } from '@/executor/utils/json' import { lazyCleanupInputMapping } from '@/executor/utils/lazy-cleanup' import { Serializer } from '@/serializer' import type { SerializedBlock } from '@/serializer/types' -import { useWorkflowRegistry } from '@/stores/workflows/registry/store' const logger = 
createLogger('WorkflowBlockHandler') @@ -74,10 +73,7 @@ export class WorkflowBlockHandler implements BlockHandler { throw new Error('No workflow selected for execution') } - // Initialize with registry name, will be updated with loaded workflow name - const { workflows } = useWorkflowRegistry.getState() - const workflowMetadata = workflows[workflowId] - let childWorkflowName = workflowMetadata?.name || workflowId + let childWorkflowName = workflowId // Unique ID per invocation — used to correlate child block events with this specific // workflow block execution, preventing cross-iteration child mixing in loop contexts. @@ -111,8 +107,7 @@ export class WorkflowBlockHandler implements BlockHandler { throw new Error(`Child workflow ${workflowId} not found`) } - // Update with loaded workflow name (more reliable than registry) - childWorkflowName = workflowMetadata?.name || childWorkflow.name || 'Unknown Workflow' + childWorkflowName = childWorkflow.name || 'Unknown Workflow' logger.info( `Executing child workflow: ${childWorkflowName} (${workflowId}), call chain depth ${ctx.callChain?.length || 0}` diff --git a/apps/sim/hooks/kb/use-tag-selection.ts b/apps/sim/hooks/kb/use-tag-selection.ts index 37ef53f9b97..490a75f8015 100644 --- a/apps/sim/hooks/kb/use-tag-selection.ts +++ b/apps/sim/hooks/kb/use-tag-selection.ts @@ -9,7 +9,7 @@ export function useTagSelection(blockId: string, subblockId: string) { const { collaborativeSetTagSelection } = useCollaborativeWorkflow() const emitTagSelectionValue = useCallback( - (value: any) => { + (value: string) => { collaborativeSetTagSelection(blockId, subblockId, value) }, [blockId, subblockId, collaborativeSetTagSelection] diff --git a/apps/sim/hooks/queries/custom-tools.ts b/apps/sim/hooks/queries/custom-tools.ts index adb8af7183b..78af01fd0ee 100644 --- a/apps/sim/hooks/queries/custom-tools.ts +++ b/apps/sim/hooks/queries/custom-tools.ts @@ -1,6 +1,6 @@ import { createLogger } from '@sim/logger' import { keepPreviousData, 
useMutation, useQuery, useQueryClient } from '@tanstack/react-query' -import { getQueryClient } from '@/app/_shell/providers/query-provider' +import { customToolsKeys } from '@/hooks/queries/utils/custom-tool-keys' const logger = createLogger('CustomToolsQueries') const API_ENDPOINT = '/api/tools/custom' @@ -29,16 +29,6 @@ export interface CustomToolDefinition { updatedAt?: string } -/** - * Query key factories for custom tools queries - */ -export const customToolsKeys = { - all: ['customTools'] as const, - lists: () => [...customToolsKeys.all, 'list'] as const, - list: (workspaceId: string) => [...customToolsKeys.lists(), workspaceId] as const, - detail: (toolId: string) => [...customToolsKeys.all, 'detail', toolId] as const, -} - export type CustomTool = CustomToolDefinition type ApiCustomTool = Partial & { @@ -87,41 +77,6 @@ function normalizeCustomTool(tool: ApiCustomTool, workspaceId: string): CustomTo } } -/** - * Extract workspaceId from the current URL path - * Expected format: /workspace/{workspaceId}/... - */ -function getWorkspaceIdFromUrl(): string | null { - if (typeof window === 'undefined') return null - const match = window.location.pathname.match(/^\/workspace\/([^/]+)/) - return match?.[1] ?? null -} - -/** - * Get all custom tools from the query cache (for non-React code) - * If workspaceId is not provided, extracts it from the current URL - */ -export function getCustomTools(workspaceId?: string): CustomToolDefinition[] { - if (typeof window === 'undefined') return [] - const wsId = workspaceId ?? getWorkspaceIdFromUrl() - if (!wsId) return [] - const queryClient = getQueryClient() - return queryClient.getQueryData(customToolsKeys.list(wsId)) ?? [] -} - -/** - * Get a specific custom tool from the query cache by ID or title (for non-React code) - * Custom tools are referenced by title in the system (custom_${title}), so title lookup is required. 
- * If workspaceId is not provided, extracts it from the current URL - */ -export function getCustomTool( - identifier: string, - workspaceId?: string -): CustomToolDefinition | undefined { - const tools = getCustomTools(workspaceId) - return tools.find((tool) => tool.id === identifier || tool.title === identifier) -} - /** * Fetch custom tools for a workspace */ diff --git a/apps/sim/hooks/queries/deployments.test.ts b/apps/sim/hooks/queries/deployments.test.ts new file mode 100644 index 00000000000..97628bf202b --- /dev/null +++ b/apps/sim/hooks/queries/deployments.test.ts @@ -0,0 +1,51 @@ +/** + * @vitest-environment node + */ +import { beforeEach, describe, expect, it, vi } from 'vitest' +import { invalidateDeploymentQueries } from '@/hooks/queries/deployments' +import { fetchDeploymentVersionState } from '@/hooks/queries/utils/fetch-deployment-version-state' + +describe('deployment query helpers', () => { + beforeEach(() => { + vi.clearAllMocks() + }) + + it('invalidates the deployment info, state, and versions queries', async () => { + const queryClient = { + invalidateQueries: vi.fn().mockResolvedValue(undefined), + } + + await invalidateDeploymentQueries(queryClient as any, 'wf-1') + + expect(queryClient.invalidateQueries).toHaveBeenNthCalledWith(1, { + queryKey: ['deployments', 'info', 'wf-1'], + }) + expect(queryClient.invalidateQueries).toHaveBeenNthCalledWith(2, { + queryKey: ['deployments', 'deployedState', 'wf-1'], + }) + expect(queryClient.invalidateQueries).toHaveBeenNthCalledWith(3, { + queryKey: ['deployments', 'versions', 'wf-1'], + }) + }) + + it('fetches deployment version state through the shared helper', async () => { + global.fetch = vi.fn().mockResolvedValue({ + ok: true, + json: async () => ({ + deployedState: { blocks: {}, edges: [], loops: {}, parallels: {}, lastSaved: 1 }, + }), + }) as typeof fetch + + await expect(fetchDeploymentVersionState('wf-1', 3)).resolves.toEqual({ + blocks: {}, + edges: [], + loops: {}, + parallels: {}, + 
lastSaved: 1, + }) + + expect(global.fetch).toHaveBeenCalledWith('/api/workflows/wf-1/deployments/3', { + signal: undefined, + }) + }) +}) diff --git a/apps/sim/hooks/queries/deployments.ts b/apps/sim/hooks/queries/deployments.ts index 0896e4d599b..62f5e970d3b 100644 --- a/apps/sim/hooks/queries/deployments.ts +++ b/apps/sim/hooks/queries/deployments.ts @@ -3,9 +3,10 @@ import { createLogger } from '@sim/logger' import type { QueryClient } from '@tanstack/react-query' import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query' import type { WorkflowDeploymentVersionResponse } from '@/lib/workflows/persistence/utils' +import { fetchDeploymentVersionState } from '@/hooks/queries/utils/fetch-deployment-version-state' +import { workflowKeys } from '@/hooks/queries/utils/workflow-keys' import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import type { WorkflowState } from '@/stores/workflows/workflow/types' -import { fetchDeploymentVersionState, workflowKeys } from './workflows' const logger = createLogger('DeploymentQueries') diff --git a/apps/sim/hooks/queries/folders.test.ts b/apps/sim/hooks/queries/folders.test.ts index 9af6eaa5bc0..355b3d899e3 100644 --- a/apps/sim/hooks/queries/folders.test.ts +++ b/apps/sim/hooks/queries/folders.test.ts @@ -1,33 +1,32 @@ import { beforeEach, describe, expect, it, vi } from 'vitest' -const { mockLogger, queryClient, useFolderStoreMock, useWorkflowRegistryMock } = vi.hoisted(() => ({ +const { mockLogger, mockGetFolderMap, mockGetWorkflows, queryClient } = vi.hoisted(() => ({ mockLogger: { info: vi.fn(), warn: vi.fn(), error: vi.fn(), debug: vi.fn(), }, + mockGetFolderMap: vi.fn(() => ({})), + mockGetWorkflows: vi.fn(() => []), queryClient: { cancelQueries: vi.fn().mockResolvedValue(undefined), invalidateQueries: vi.fn().mockResolvedValue(undefined), + getQueryData: vi.fn(), + setQueryData: vi.fn(), }, - useFolderStoreMock: Object.assign(vi.fn(), { - getState: vi.fn(), - 
setState: vi.fn(), - }), - useWorkflowRegistryMock: Object.assign(vi.fn(), { - getState: vi.fn(), - setState: vi.fn(), - }), })) -let folderState: { - folders: Record -} +let folderMapState: Record +let folderListState: any[] -let workflowRegistryState: { - workflows: Record -} +let workflowList: Array<{ + id: string + name: string + workspaceId: string + folderId: string + sortOrder: number +}> vi.mock('@sim/logger', () => ({ createLogger: vi.fn(() => mockLogger), @@ -40,15 +39,15 @@ vi.mock('@tanstack/react-query', () => ({ useMutation: vi.fn((options) => options), })) -vi.mock('@/stores/folders/store', () => ({ - useFolderStore: useFolderStoreMock, +vi.mock('@/hooks/queries/utils/workflow-cache', () => ({ + getWorkflows: mockGetWorkflows, })) -vi.mock('@/stores/workflows/registry/store', () => ({ - useWorkflowRegistry: useWorkflowRegistryMock, +vi.mock('@/hooks/queries/utils/folder-cache', () => ({ + getFolderMap: mockGetFolderMap, })) -vi.mock('@/hooks/queries/workflows', () => ({ +vi.mock('@/hooks/queries/utils/workflow-keys', () => ({ workflowKeys: { list: (workspaceId: string | undefined) => ['workflows', 'list', workspaceId ?? 
''], }, @@ -57,7 +56,7 @@ vi.mock('@/hooks/queries/workflows', () => ({ import { useCreateFolder, useDuplicateFolderMutation } from '@/hooks/queries/folders' function getOptimisticFolderByName(name: string) { - return Object.values(folderState.folders).find((folder: any) => folder.name === name) as + return Object.values(folderMapState).find((folder: any) => folder.name === name) as | { sortOrder: number } | undefined } @@ -65,67 +64,60 @@ function getOptimisticFolderByName(name: string) { describe('folder optimistic top insertion ordering', () => { beforeEach(() => { vi.clearAllMocks() - useFolderStoreMock.getState.mockImplementation(() => folderState) - useFolderStoreMock.setState.mockImplementation((updater: any) => { - if (typeof updater === 'function') { - const next = updater(folderState) - if (next) { - folderState = { ...folderState, ...next } - } - return - } - - folderState = { ...folderState, ...updater } + queryClient.getQueryData.mockImplementation(() => folderListState) + queryClient.setQueryData.mockImplementation((_key: unknown, updater: any) => { + folderListState = typeof updater === 'function' ? updater(folderListState) : updater + folderMapState = Object.fromEntries( + (folderListState ?? 
[]).map((folder: any) => [folder.id, folder]) + ) }) - useWorkflowRegistryMock.getState.mockImplementation(() => workflowRegistryState) - - folderState = { - folders: { - 'folder-parent-match': { - id: 'folder-parent-match', - name: 'Existing sibling folder', - userId: 'user-1', - workspaceId: 'ws-1', - parentId: 'parent-1', - color: '#808080', - isExpanded: false, - sortOrder: 5, - createdAt: new Date(), - updatedAt: new Date(), - }, - 'folder-other-parent': { - id: 'folder-other-parent', - name: 'Other parent folder', - userId: 'user-1', - workspaceId: 'ws-1', - parentId: 'parent-2', - color: '#808080', - isExpanded: false, - sortOrder: -100, - createdAt: new Date(), - updatedAt: new Date(), - }, + mockGetFolderMap.mockImplementation(() => folderMapState) + mockGetWorkflows.mockImplementation(() => workflowList) + + folderListState = [ + { + id: 'folder-parent-match', + name: 'Existing sibling folder', + userId: 'user-1', + workspaceId: 'ws-1', + parentId: 'parent-1', + color: '#808080', + isExpanded: false, + sortOrder: 5, + createdAt: new Date(), + updatedAt: new Date(), + }, + { + id: 'folder-other-parent', + name: 'Other parent folder', + userId: 'user-1', + workspaceId: 'ws-1', + parentId: 'parent-2', + color: '#808080', + isExpanded: false, + sortOrder: -100, + createdAt: new Date(), + updatedAt: new Date(), + }, + ] + folderMapState = Object.fromEntries(folderListState.map((folder) => [folder.id, folder])) + + workflowList = [ + { + id: 'workflow-parent-match', + name: 'Existing sibling workflow', + workspaceId: 'ws-1', + folderId: 'parent-1', + sortOrder: 2, }, - } - - workflowRegistryState = { - workflows: { - 'workflow-parent-match': { - id: 'workflow-parent-match', - name: 'Existing sibling workflow', - workspaceId: 'ws-1', - folderId: 'parent-1', - sortOrder: 2, - }, - 'workflow-other-parent': { - id: 'workflow-other-parent', - name: 'Other parent workflow', - workspaceId: 'ws-1', - folderId: 'parent-2', - sortOrder: -50, - }, + { + id: 
'workflow-other-parent', + name: 'Other parent workflow', + workspaceId: 'ws-1', + folderId: 'parent-2', + sortOrder: -50, }, - } + ] }) it('creates folders at top of mixed non-root siblings', async () => { diff --git a/apps/sim/hooks/queries/folders.ts b/apps/sim/hooks/queries/folders.ts index 5b22872feec..8b8ccb408a7 100644 --- a/apps/sim/hooks/queries/folders.ts +++ b/apps/sim/hooks/queries/folders.ts @@ -1,31 +1,25 @@ -import { useEffect } from 'react' import { createLogger } from '@sim/logger' import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query' +import { getFolderMap } from '@/hooks/queries/utils/folder-cache' +import { folderKeys } from '@/hooks/queries/utils/folder-keys' +import { invalidateWorkflowLists } from '@/hooks/queries/utils/invalidate-workflow-lists' import { createOptimisticMutationHandlers, generateTempId, } from '@/hooks/queries/utils/optimistic-mutation' import { getTopInsertionSortOrder } from '@/hooks/queries/utils/top-insertion-sort-order' -import { workflowKeys } from '@/hooks/queries/workflows' -import { useFolderStore } from '@/stores/folders/store' +import { getWorkflows } from '@/hooks/queries/utils/workflow-cache' import type { WorkflowFolder } from '@/stores/folders/types' -import { useWorkflowRegistry } from '@/stores/workflows/registry/store' const logger = createLogger('FolderQueries') -export const folderKeys = { - all: ['folders'] as const, - lists: () => [...folderKeys.all, 'list'] as const, - list: (workspaceId: string | undefined) => [...folderKeys.lists(), workspaceId ?? ''] as const, -} - function mapFolder(folder: any): WorkflowFolder { return { id: folder.id, name: folder.name, userId: folder.userId, workspaceId: folder.workspaceId, - parentId: folder.parentId, + parentId: folder.parentId ?? 
null, color: folder.color, isExpanded: folder.isExpanded, sortOrder: folder.sortOrder, @@ -46,23 +40,24 @@ async function fetchFolders(workspaceId: string, signal?: AbortSignal): Promise< } export function useFolders(workspaceId?: string) { - const setFolders = useFolderStore((state) => state.setFolders) - - const query = useQuery({ + return useQuery({ queryKey: folderKeys.list(workspaceId), queryFn: ({ signal }) => fetchFolders(workspaceId as string, signal), enabled: Boolean(workspaceId), placeholderData: keepPreviousData, staleTime: 60 * 1000, }) +} - useEffect(() => { - if (query.data) { - setFolders(query.data) - } - }, [query.data, setFolders]) - - return query +export function useFolderMap(workspaceId?: string) { + return useQuery({ + queryKey: folderKeys.list(workspaceId), + queryFn: ({ signal }) => fetchFolders(workspaceId as string, signal), + enabled: Boolean(workspaceId), + placeholderData: keepPreviousData, + staleTime: 60 * 1000, + select: (folders) => Object.fromEntries(folders.map((folder) => [folder.id, folder])), + }) } interface CreateFolderVariables { @@ -110,54 +105,25 @@ function createFolderMutationHandlers(queryClient, { name, getQueryKey: (variables) => folderKeys.list(variables.workspaceId), - getSnapshot: () => ({ ...useFolderStore.getState().folders }), + getSnapshot: (variables) => ({ ...getFolderMap(variables.workspaceId) }), generateTempId: customGenerateTempId ?? (() => generateTempId('temp-folder')), createOptimisticItem: (variables, tempId) => { - const previousFolders = useFolderStore.getState().folders + const previousFolders = getFolderMap(variables.workspaceId) return createOptimisticFolder(variables, tempId, previousFolders) }, applyOptimisticUpdate: (tempId, item) => { - useFolderStore.setState((state) => ({ - folders: { ...state.folders, [tempId]: item }, - })) + queryClient.setQueryData(folderKeys.list(item.workspaceId), (old) => [ + ...(old ?? 
[]), + item, + ]) }, replaceOptimisticEntry: (tempId, data) => { - useFolderStore.setState((state) => { - const { [tempId]: _, ...remainingFolders } = state.folders - - const update: Record = { - folders: { - ...remainingFolders, - [data.id]: data, - }, - } - - if (tempId !== data.id) { - const expandedFolders = new Set(state.expandedFolders) - const selectedFolders = new Set(state.selectedFolders) - - if (expandedFolders.has(tempId)) { - expandedFolders.delete(tempId) - expandedFolders.add(data.id) - } - if (selectedFolders.has(tempId)) { - selectedFolders.delete(tempId) - selectedFolders.add(data.id) - } - - update.expandedFolders = expandedFolders - update.selectedFolders = selectedFolders - - if (state.lastSelectedFolderId === tempId) { - update.lastSelectedFolderId = data.id - } - } - - return update - }) + queryClient.setQueryData(folderKeys.list(data.workspaceId), (old) => + (old ?? []).map((folder) => (folder.id === tempId ? data : folder)) + ) }, - rollback: (snapshot) => { - useFolderStore.setState({ folders: snapshot }) + rollback: (snapshot, variables) => { + queryClient.setQueryData(folderKeys.list(variables.workspaceId), Object.values(snapshot)) }, }) } @@ -169,7 +135,9 @@ export function useCreateFolder() { queryClient, 'CreateFolder', (variables, tempId, previousFolders) => { - const currentWorkflows = useWorkflowRegistry.getState().workflows + const currentWorkflows = Object.fromEntries( + getWorkflows(variables.workspaceId).map((w) => [w.id, w]) + ) return { id: tempId, @@ -233,7 +201,7 @@ export function useUpdateFolder() { const { folder } = await response.json() return mapFolder(folder) }, - onSuccess: (_data, variables) => { + onSettled: (_data, _error, variables) => { queryClient.invalidateQueries({ queryKey: folderKeys.list(variables.workspaceId) }) }, }) @@ -253,9 +221,9 @@ export function useDeleteFolderMutation() { return response.json() }, - onSuccess: async (_data, variables) => { + onSettled: (_data, _error, variables) => { 
queryClient.invalidateQueries({ queryKey: folderKeys.list(variables.workspaceId) }) - queryClient.invalidateQueries({ queryKey: workflowKeys.lists() }) + return invalidateWorkflowLists(queryClient, variables.workspaceId, ['active', 'archived']) }, }) } @@ -267,7 +235,9 @@ export function useDuplicateFolderMutation() { queryClient, 'DuplicateFolder', (variables, tempId, previousFolders) => { - const currentWorkflows = useWorkflowRegistry.getState().workflows + const currentWorkflows = Object.fromEntries( + getWorkflows(variables.workspaceId).map((w) => [w.id, w]) + ) const sourceFolder = previousFolders[variables.id] const targetParentId = variables.parentId ?? sourceFolder?.parentId ?? null @@ -324,7 +294,7 @@ export function useDuplicateFolderMutation() { ...handlers, onSettled: (_data, _error, variables) => { queryClient.invalidateQueries({ queryKey: folderKeys.list(variables.workspaceId) }) - queryClient.invalidateQueries({ queryKey: workflowKeys.lists() }) + return invalidateWorkflowLists(queryClient, variables.workspaceId) }, }) } @@ -357,28 +327,29 @@ export function useReorderFolders() { onMutate: async (variables) => { await queryClient.cancelQueries({ queryKey: folderKeys.list(variables.workspaceId) }) - const snapshot = { ...useFolderStore.getState().folders } - - useFolderStore.setState((state) => { - const updated = { ...state.folders } - for (const update of variables.updates) { - if (updated[update.id]) { - updated[update.id] = { - ...updated[update.id], - sortOrder: update.sortOrder, - parentId: - update.parentId !== undefined ? 
update.parentId : updated[update.id].parentId, - } + const snapshot = queryClient.getQueryData( + folderKeys.list(variables.workspaceId) + ) + + const updatesById = new Map(variables.updates.map((update) => [update.id, update])) + queryClient.setQueryData(folderKeys.list(variables.workspaceId), (old) => { + if (!old?.length) return old + return old.map((folder) => { + const update = updatesById.get(folder.id) + if (!update) return folder + return { + ...folder, + sortOrder: update.sortOrder, + parentId: update.parentId !== undefined ? update.parentId : folder.parentId, } - } - return { folders: updated } + }) }) return { snapshot } }, - onError: (_error, _variables, context) => { + onError: (_error, variables, context) => { if (context?.snapshot) { - useFolderStore.setState({ folders: context.snapshot }) + queryClient.setQueryData(folderKeys.list(variables.workspaceId), context.snapshot) } }, onSettled: (_data, _error, variables) => { diff --git a/apps/sim/hooks/queries/general-settings.ts b/apps/sim/hooks/queries/general-settings.ts index 1d0644499af..0fb05164dd1 100644 --- a/apps/sim/hooks/queries/general-settings.ts +++ b/apps/sim/hooks/queries/general-settings.ts @@ -83,8 +83,8 @@ export function useGeneralSettings() { export function prefetchGeneralSettings(queryClient: QueryClient) { queryClient.prefetchQuery({ queryKey: generalSettingsKeys.settings(), - queryFn: async () => { - const settings = await fetchGeneralSettings() + queryFn: async ({ signal }) => { + const settings = await fetchGeneralSettings(signal) syncThemeToNextThemes(settings.theme) return settings }, diff --git a/apps/sim/hooks/queries/logs.ts b/apps/sim/hooks/queries/logs.ts index 3bed38cdf19..0e684c3dc85 100644 --- a/apps/sim/hooks/queries/logs.ts +++ b/apps/sim/hooks/queries/logs.ts @@ -176,7 +176,7 @@ export function useLogDetail(logId: string | undefined, options?: UseLogDetailOp export function prefetchLogDetail(queryClient: QueryClient, logId: string) { queryClient.prefetchQuery({ 
queryKey: logKeys.detail(logId), - queryFn: () => fetchLogDetail(logId), + queryFn: ({ signal }) => fetchLogDetail(logId, signal), staleTime: 30 * 1000, }) } diff --git a/apps/sim/hooks/queries/providers.ts b/apps/sim/hooks/queries/providers.ts index d043dbb1018..7dc18894243 100644 --- a/apps/sim/hooks/queries/providers.ts +++ b/apps/sim/hooks/queries/providers.ts @@ -9,6 +9,7 @@ const providerEndpoints: Record = { ollama: '/api/providers/ollama/models', vllm: '/api/providers/vllm/models', openrouter: '/api/providers/openrouter/models', + fireworks: '/api/providers/fireworks/models', } interface ProviderModelsResponse { @@ -18,14 +19,21 @@ interface ProviderModelsResponse { export const providerKeys = { all: ['provider-models'] as const, - models: (provider: string) => [...providerKeys.all, provider] as const, + models: (provider: string, workspaceId?: string) => + [...providerKeys.all, provider, workspaceId ?? ''] as const, } async function fetchProviderModels( provider: ProviderName, - signal?: AbortSignal + signal?: AbortSignal, + workspaceId?: string ): Promise { - const response = await fetch(providerEndpoints[provider], { signal }) + let url = providerEndpoints[provider] + if (provider === 'fireworks' && workspaceId) { + url = `${url}?workspaceId=${encodeURIComponent(workspaceId)}` + } + + const response = await fetch(url, { signal }) if (!response.ok) { logger.warn(`Failed to fetch ${provider} models`, { @@ -45,10 +53,10 @@ async function fetchProviderModels( } } -export function useProviderModels(provider: ProviderName) { +export function useProviderModels(provider: ProviderName, workspaceId?: string) { return useQuery({ - queryKey: providerKeys.models(provider), - queryFn: ({ signal }) => fetchProviderModels(provider, signal), + queryKey: providerKeys.models(provider, workspaceId), + queryFn: ({ signal }) => fetchProviderModels(provider, signal, workspaceId), staleTime: 5 * 60 * 1000, }) } diff --git a/apps/sim/hooks/queries/subscription.ts 
b/apps/sim/hooks/queries/subscription.ts index c1149bf6d8e..c30fbff6c3b 100644 --- a/apps/sim/hooks/queries/subscription.ts +++ b/apps/sim/hooks/queries/subscription.ts @@ -142,7 +142,7 @@ export function useSubscriptionData(options: UseSubscriptionDataOptions = {}) { export function prefetchSubscriptionData(queryClient: QueryClient) { queryClient.prefetchQuery({ queryKey: subscriptionKeys.user(false), - queryFn: () => fetchSubscriptionData(false), + queryFn: ({ signal }) => fetchSubscriptionData(false, signal), staleTime: 30 * 1000, }) } diff --git a/apps/sim/hooks/queries/utils/custom-tool-cache.test.ts b/apps/sim/hooks/queries/utils/custom-tool-cache.test.ts new file mode 100644 index 00000000000..c4b7fd54021 --- /dev/null +++ b/apps/sim/hooks/queries/utils/custom-tool-cache.test.ts @@ -0,0 +1,39 @@ +/** + * @vitest-environment node + */ +import { beforeEach, describe, expect, it, vi } from 'vitest' + +const { getQueryDataMock } = vi.hoisted(() => ({ + getQueryDataMock: vi.fn(), +})) + +vi.mock('@/app/_shell/providers/get-query-client', () => ({ + getQueryClient: vi.fn(() => ({ + getQueryData: getQueryDataMock, + })), +})) + +import { getCustomTool, getCustomTools } from '@/hooks/queries/utils/custom-tool-cache' + +describe('custom tool cache helpers', () => { + beforeEach(() => { + vi.clearAllMocks() + }) + + it('reads workspace-scoped custom tools from the cache', () => { + const tools = [{ id: 'tool-1', title: 'Weather', schema: {}, code: '', workspaceId: 'ws-1' }] + getQueryDataMock.mockReturnValue(tools) + + expect(getCustomTools('ws-1')).toBe(tools) + expect(getQueryDataMock).toHaveBeenCalledWith(['customTools', 'list', 'ws-1']) + }) + + it('resolves custom tools by id or title', () => { + getQueryDataMock.mockReturnValue([ + { id: 'tool-1', title: 'Weather', schema: {}, code: '', workspaceId: 'ws-1' }, + ]) + + expect(getCustomTool('tool-1', 'ws-1')?.title).toBe('Weather') + expect(getCustomTool('Weather', 'ws-1')?.id).toBe('tool-1') + }) +}) diff --git 
a/apps/sim/hooks/queries/utils/custom-tool-cache.ts b/apps/sim/hooks/queries/utils/custom-tool-cache.ts new file mode 100644 index 00000000000..407911ddb37 --- /dev/null +++ b/apps/sim/hooks/queries/utils/custom-tool-cache.ts @@ -0,0 +1,23 @@ +import { getQueryClient } from '@/app/_shell/providers/get-query-client' +import type { CustomToolDefinition } from '@/hooks/queries/custom-tools' +import { customToolsKeys } from '@/hooks/queries/utils/custom-tool-keys' + +/** + * Reads custom tools for a workspace directly from the React Query cache. + */ +export function getCustomTools(workspaceId: string): CustomToolDefinition[] { + return ( + getQueryClient().getQueryData(customToolsKeys.list(workspaceId)) ?? [] + ) +} + +/** + * Resolves a custom tool from the cache by id or title. + */ +export function getCustomTool( + identifier: string, + workspaceId: string +): CustomToolDefinition | undefined { + const tools = getCustomTools(workspaceId) + return tools.find((tool) => tool.id === identifier || tool.title === identifier) +} diff --git a/apps/sim/hooks/queries/utils/custom-tool-keys.ts b/apps/sim/hooks/queries/utils/custom-tool-keys.ts new file mode 100644 index 00000000000..1bb74bd8da5 --- /dev/null +++ b/apps/sim/hooks/queries/utils/custom-tool-keys.ts @@ -0,0 +1,6 @@ +export const customToolsKeys = { + all: ['customTools'] as const, + lists: () => [...customToolsKeys.all, 'list'] as const, + list: (workspaceId: string) => [...customToolsKeys.lists(), workspaceId] as const, + detail: (toolId: string) => [...customToolsKeys.all, 'detail', toolId] as const, +} diff --git a/apps/sim/hooks/queries/utils/fetch-deployment-version-state.ts b/apps/sim/hooks/queries/utils/fetch-deployment-version-state.ts new file mode 100644 index 00000000000..ded2a7a10cf --- /dev/null +++ b/apps/sim/hooks/queries/utils/fetch-deployment-version-state.ts @@ -0,0 +1,27 @@ +import type { WorkflowState } from '@/stores/workflows/workflow/types' + +interface DeploymentVersionStateResponse { + 
deployedState: WorkflowState +} + +/** + * Fetches the deployed state for a specific deployment version. + */ +export async function fetchDeploymentVersionState( + workflowId: string, + version: number, + signal?: AbortSignal +): Promise { + const response = await fetch(`/api/workflows/${workflowId}/deployments/${version}`, { signal }) + + if (!response.ok) { + throw new Error(`Failed to fetch deployment version: ${response.statusText}`) + } + + const data: DeploymentVersionStateResponse = await response.json() + if (!data.deployedState) { + throw new Error('No deployed state returned') + } + + return data.deployedState +} diff --git a/apps/sim/hooks/queries/utils/folder-cache.ts b/apps/sim/hooks/queries/utils/folder-cache.ts new file mode 100644 index 00000000000..158f558bb60 --- /dev/null +++ b/apps/sim/hooks/queries/utils/folder-cache.ts @@ -0,0 +1,15 @@ +import { getQueryClient } from '@/app/_shell/providers/get-query-client' +import { folderKeys } from '@/hooks/queries/utils/folder-keys' +import type { WorkflowFolder } from '@/stores/folders/types' + +const EMPTY_FOLDERS: WorkflowFolder[] = [] + +export function getFolders(workspaceId: string): WorkflowFolder[] { + return ( + getQueryClient().getQueryData(folderKeys.list(workspaceId)) ?? EMPTY_FOLDERS + ) +} + +export function getFolderMap(workspaceId: string): Record { + return Object.fromEntries(getFolders(workspaceId).map((folder) => [folder.id, folder])) +} diff --git a/apps/sim/hooks/queries/utils/folder-keys.ts b/apps/sim/hooks/queries/utils/folder-keys.ts new file mode 100644 index 00000000000..517d5cd9b94 --- /dev/null +++ b/apps/sim/hooks/queries/utils/folder-keys.ts @@ -0,0 +1,5 @@ +export const folderKeys = { + all: ['folders'] as const, + lists: () => [...folderKeys.all, 'list'] as const, + list: (workspaceId: string | undefined) => [...folderKeys.lists(), workspaceId ?? 
''] as const, +} diff --git a/apps/sim/hooks/queries/utils/invalidate-workflow-lists.test.ts b/apps/sim/hooks/queries/utils/invalidate-workflow-lists.test.ts new file mode 100644 index 00000000000..6c5963840ae --- /dev/null +++ b/apps/sim/hooks/queries/utils/invalidate-workflow-lists.test.ts @@ -0,0 +1,25 @@ +/** + * @vitest-environment node + */ +import { describe, expect, it, vi } from 'vitest' +import { invalidateWorkflowLists } from '@/hooks/queries/utils/invalidate-workflow-lists' + +describe('invalidateWorkflowLists', () => { + it('invalidates scoped workflow lists and workflow selector caches', async () => { + const queryClient = { + invalidateQueries: vi.fn().mockResolvedValue(undefined), + } + + await invalidateWorkflowLists(queryClient as any, 'ws-1', ['active', 'archived']) + + expect(queryClient.invalidateQueries).toHaveBeenNthCalledWith(1, { + queryKey: ['workflows', 'list', 'ws-1', 'active'], + }) + expect(queryClient.invalidateQueries).toHaveBeenNthCalledWith(2, { + queryKey: ['workflows', 'list', 'ws-1', 'archived'], + }) + expect(queryClient.invalidateQueries).toHaveBeenNthCalledWith(3, { + queryKey: ['selectors', 'sim.workflows', 'ws-1'], + }) + }) +}) diff --git a/apps/sim/hooks/queries/utils/invalidate-workflow-lists.ts b/apps/sim/hooks/queries/utils/invalidate-workflow-lists.ts new file mode 100644 index 00000000000..f04e19aa9b8 --- /dev/null +++ b/apps/sim/hooks/queries/utils/invalidate-workflow-lists.ts @@ -0,0 +1,25 @@ +import type { QueryClient } from '@tanstack/react-query' +import { type WorkflowQueryScope, workflowKeys } from '@/hooks/queries/utils/workflow-keys' +import { selectorKeys } from '@/hooks/selectors/query-keys' + +export async function invalidateWorkflowSelectors(queryClient: QueryClient, workspaceId: string) { + await queryClient.invalidateQueries({ queryKey: selectorKeys.simWorkflowsPrefix(workspaceId) }) +} + +/** + * Invalidates workflow list consumers for a single workspace. 
+ */ +export async function invalidateWorkflowLists( + queryClient: QueryClient, + workspaceId: string, + scopes: WorkflowQueryScope[] = ['active'] +) { + const uniqueScopes = [...new Set(scopes)] + + await Promise.all([ + ...uniqueScopes.map((scope) => + queryClient.invalidateQueries({ queryKey: workflowKeys.list(workspaceId, scope) }) + ), + invalidateWorkflowSelectors(queryClient, workspaceId), + ]) +} diff --git a/apps/sim/hooks/queries/utils/optimistic-mutation.ts b/apps/sim/hooks/queries/utils/optimistic-mutation.ts index 47482e7ffa9..27f45ff26b6 100644 --- a/apps/sim/hooks/queries/utils/optimistic-mutation.ts +++ b/apps/sim/hooks/queries/utils/optimistic-mutation.ts @@ -6,12 +6,12 @@ const logger = createLogger('OptimisticMutation') export interface OptimisticMutationConfig { name: string getQueryKey: (variables: TVariables) => readonly unknown[] - getSnapshot: () => Record + getSnapshot: (variables: TVariables) => Record generateTempId: (variables: TVariables) => string createOptimisticItem: (variables: TVariables, tempId: string) => TItem applyOptimisticUpdate: (tempId: string, item: TItem) => void replaceOptimisticEntry: (tempId: string, data: TData) => void - rollback: (snapshot: Record) => void + rollback: (snapshot: Record, variables: TVariables) => void onSuccessExtra?: (data: TData, variables: TVariables) => void } @@ -40,7 +40,7 @@ export function createOptimisticMutationHandlers( onMutate: async (variables: TVariables): Promise> => { const queryKey = getQueryKey(variables) await queryClient.cancelQueries({ queryKey }) - const previousState = getSnapshot() + const previousState = getSnapshot(variables) const tempId = generateTempId(variables) const optimisticItem = createOptimisticItem(variables, tempId) applyOptimisticUpdate(tempId, optimisticItem) @@ -61,7 +61,7 @@ export function createOptimisticMutationHandlers( ) => { logger.error(`[${name}] Failed:`, error) if (context?.previousState) { - rollback(context.previousState) + 
rollback(context.previousState, _variables) logger.info(`[${name}] Rolled back to previous state`) } }, diff --git a/apps/sim/hooks/queries/utils/workflow-cache.test.ts b/apps/sim/hooks/queries/utils/workflow-cache.test.ts new file mode 100644 index 00000000000..a3ccf8da645 --- /dev/null +++ b/apps/sim/hooks/queries/utils/workflow-cache.test.ts @@ -0,0 +1,46 @@ +/** + * @vitest-environment node + */ +import { beforeEach, describe, expect, it, vi } from 'vitest' + +const { getQueryDataMock } = vi.hoisted(() => ({ + getQueryDataMock: vi.fn(), +})) + +vi.mock('@/app/_shell/providers/get-query-client', () => ({ + getQueryClient: vi.fn(() => ({ + getQueryData: getQueryDataMock, + })), +})) + +import { getWorkflowById, getWorkflows } from '@/hooks/queries/utils/workflow-cache' + +describe('getWorkflows', () => { + beforeEach(() => { + vi.clearAllMocks() + }) + + it('reads the active workflow list from the cache', () => { + const workflows = [{ id: 'wf-1', name: 'Workflow 1' }] + getQueryDataMock.mockReturnValue(workflows) + + expect(getWorkflows('ws-1')).toBe(workflows) + expect(getQueryDataMock).toHaveBeenCalledWith(['workflows', 'list', 'ws-1', 'active']) + }) + + it('supports alternate workflow scopes', () => { + getQueryDataMock.mockReturnValue([]) + + getWorkflows('ws-2', 'archived') + + expect(getQueryDataMock).toHaveBeenCalledWith(['workflows', 'list', 'ws-2', 'archived']) + }) + + it('reads a single workflow by id from the cache', () => { + const workflows = [{ id: 'wf-1', name: 'Workflow 1' }] + getQueryDataMock.mockReturnValue(workflows) + + expect(getWorkflowById('ws-1', 'wf-1')).toEqual(workflows[0]) + expect(getWorkflowById('ws-1', 'missing')).toBeUndefined() + }) +}) diff --git a/apps/sim/hooks/queries/utils/workflow-cache.ts b/apps/sim/hooks/queries/utils/workflow-cache.ts new file mode 100644 index 00000000000..c810a236f87 --- /dev/null +++ b/apps/sim/hooks/queries/utils/workflow-cache.ts @@ -0,0 +1,29 @@ +import { getQueryClient } from 
'@/app/_shell/providers/get-query-client' +import { type WorkflowQueryScope, workflowKeys } from '@/hooks/queries/utils/workflow-keys' +import type { WorkflowMetadata } from '@/stores/workflows/registry/types' + +const EMPTY_WORKFLOWS: WorkflowMetadata[] = [] + +/** + * Reads workflow metadata for a workspace directly from the React Query cache. + */ +export function getWorkflows( + workspaceId: string, + scope: WorkflowQueryScope = 'active' +): WorkflowMetadata[] { + return ( + getQueryClient().getQueryData(workflowKeys.list(workspaceId, scope)) ?? + EMPTY_WORKFLOWS + ) +} + +/** + * Reads a single workflow by id from the React Query cache. + */ +export function getWorkflowById( + workspaceId: string, + workflowId: string, + scope: WorkflowQueryScope = 'active' +): WorkflowMetadata | undefined { + return getWorkflows(workspaceId, scope).find((workflow) => workflow.id === workflowId) +} diff --git a/apps/sim/hooks/queries/utils/workflow-keys.ts b/apps/sim/hooks/queries/utils/workflow-keys.ts new file mode 100644 index 00000000000..8512e02a42b --- /dev/null +++ b/apps/sim/hooks/queries/utils/workflow-keys.ts @@ -0,0 +1,13 @@ +export type WorkflowQueryScope = 'active' | 'archived' | 'all' + +export const workflowKeys = { + all: ['workflows'] as const, + lists: () => [...workflowKeys.all, 'list'] as const, + list: (workspaceId: string | undefined, scope: WorkflowQueryScope = 'active') => + [...workflowKeys.lists(), workspaceId ?? '', scope] as const, + deploymentVersions: () => [...workflowKeys.all, 'deploymentVersion'] as const, + deploymentVersion: (workflowId: string | undefined, version: number | undefined) => + [...workflowKeys.deploymentVersions(), workflowId ?? '', version ?? 0] as const, + state: (workflowId: string | undefined) => + [...workflowKeys.all, 'state', workflowId ?? 
''] as const, +} diff --git a/apps/sim/hooks/queries/utils/workflow-list-query.ts b/apps/sim/hooks/queries/utils/workflow-list-query.ts new file mode 100644 index 00000000000..63b1b6abc92 --- /dev/null +++ b/apps/sim/hooks/queries/utils/workflow-list-query.ts @@ -0,0 +1,61 @@ +import type { QueryFunctionContext } from '@tanstack/react-query' +import { type WorkflowQueryScope, workflowKeys } from '@/hooks/queries/utils/workflow-keys' +import type { WorkflowMetadata } from '@/stores/workflows/registry/types' + +interface WorkflowApiRow { + id: string + name: string + description?: string | null + color: string + workspaceId: string + folderId?: string | null + sortOrder?: number | null + createdAt: string + updatedAt?: string | null + archivedAt?: string | null +} + +export const WORKFLOW_LIST_STALE_TIME = 60 * 1000 + +export function mapWorkflow(workflow: WorkflowApiRow): WorkflowMetadata { + return { + id: workflow.id, + name: workflow.name, + description: workflow.description ?? undefined, + color: workflow.color, + workspaceId: workflow.workspaceId, + folderId: workflow.folderId ?? null, + sortOrder: workflow.sortOrder ?? 0, + createdAt: new Date(workflow.createdAt), + lastModified: new Date(workflow.updatedAt || workflow.createdAt), + archivedAt: workflow.archivedAt ? 
new Date(workflow.archivedAt) : null, + } +} + +export async function fetchWorkflows( + workspaceId: string, + scope: WorkflowQueryScope = 'active', + signal?: AbortSignal +): Promise { + const response = await fetch(`/api/workflows?workspaceId=${workspaceId}&scope=${scope}`, { + signal, + }) + + if (!response.ok) { + throw new Error('Failed to fetch workflows') + } + + const { data }: { data: WorkflowApiRow[] } = await response.json() + return data.map(mapWorkflow) +} + +export function getWorkflowListQueryOptions( + workspaceId: string, + scope: WorkflowQueryScope = 'active' +) { + return { + queryKey: workflowKeys.list(workspaceId, scope), + queryFn: ({ signal }: QueryFunctionContext) => fetchWorkflows(workspaceId, scope, signal), + staleTime: WORKFLOW_LIST_STALE_TIME, + } +} diff --git a/apps/sim/hooks/queries/workflows.ts b/apps/sim/hooks/queries/workflows.ts index c2ae3a40363..f850eae3f40 100644 --- a/apps/sim/hooks/queries/workflows.ts +++ b/apps/sim/hooks/queries/workflows.ts @@ -1,14 +1,29 @@ -import { useEffect } from 'react' +/** + * React Query hooks for managing workflow metadata and mutations. 
+ */ + import { createLogger } from '@sim/logger' -import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query' +import { + keepPreviousData, + skipToken, + useMutation, + useQuery, + useQueryClient, +} from '@tanstack/react-query' import { getNextWorkflowColor } from '@/lib/workflows/colors' import { buildDefaultWorkflowArtifacts } from '@/lib/workflows/defaults' import { deploymentKeys } from '@/hooks/queries/deployments' -import { - createOptimisticMutationHandlers, - generateTempId, -} from '@/hooks/queries/utils/optimistic-mutation' +import { fetchDeploymentVersionState } from '@/hooks/queries/utils/fetch-deployment-version-state' +import { getFolderMap } from '@/hooks/queries/utils/folder-cache' +import { invalidateWorkflowLists } from '@/hooks/queries/utils/invalidate-workflow-lists' import { getTopInsertionSortOrder } from '@/hooks/queries/utils/top-insertion-sort-order' +import { getWorkflows } from '@/hooks/queries/utils/workflow-cache' +import { type WorkflowQueryScope, workflowKeys } from '@/hooks/queries/utils/workflow-keys' +import { + getWorkflowListQueryOptions, + mapWorkflow, + WORKFLOW_LIST_STALE_TIME, +} from '@/hooks/queries/utils/workflow-list-query' import { useFolderStore } from '@/stores/folders/store' import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import type { WorkflowMetadata } from '@/stores/workflows/registry/types' @@ -18,24 +33,8 @@ import type { WorkflowState } from '@/stores/workflows/workflow/types' const logger = createLogger('WorkflowQueries') -type WorkflowQueryScope = 'active' | 'archived' | 'all' - -export const workflowKeys = { - all: ['workflows'] as const, - lists: () => [...workflowKeys.all, 'list'] as const, - list: (workspaceId: string | undefined, scope: WorkflowQueryScope = 'active') => - [...workflowKeys.lists(), workspaceId ?? 
'', scope] as const, - deploymentVersions: () => [...workflowKeys.all, 'deploymentVersion'] as const, - deploymentVersion: (workflowId: string | undefined, version: number | undefined) => - [...workflowKeys.deploymentVersions(), workflowId ?? '', version ?? 0] as const, - state: (workflowId: string | undefined) => - [...workflowKeys.all, 'state', workflowId ?? ''] as const, -} +export { type WorkflowQueryScope, workflowKeys } from '@/hooks/queries/utils/workflow-keys' -/** - * Fetches workflow state from the API. - * Used as the base query for both state preview and input fields extraction. - */ async function fetchWorkflowState( workflowId: string, signal?: AbortSignal @@ -47,113 +46,44 @@ async function fetchWorkflowState( } /** - * Hook to fetch workflow state. + * Fetches the full workflow state for a single workflow. * Used by workflow blocks to show a preview of the child workflow * and as a base query for input fields extraction. - * - * @param workflowId - The workflow ID to fetch state for - * @returns Query result with workflow state */ export function useWorkflowState(workflowId: string | undefined) { return useQuery({ queryKey: workflowKeys.state(workflowId), - queryFn: ({ signal }) => fetchWorkflowState(workflowId!, signal), - enabled: Boolean(workflowId), - staleTime: 30 * 1000, // 30 seconds - placeholderData: keepPreviousData, + queryFn: workflowId ? ({ signal }) => fetchWorkflowState(workflowId, signal) : skipToken, + staleTime: 30 * 1000, }) } -function mapWorkflow(workflow: any): WorkflowMetadata { - return { - id: workflow.id, - name: workflow.name, - description: workflow.description, - color: workflow.color, - workspaceId: workflow.workspaceId, - folderId: workflow.folderId, - sortOrder: workflow.sortOrder ?? 0, - createdAt: new Date(workflow.createdAt), - lastModified: new Date(workflow.updatedAt || workflow.createdAt), - archivedAt: workflow.archivedAt ? 
new Date(workflow.archivedAt) : null, - } -} +export function useWorkflows(workspaceId?: string, options?: { scope?: WorkflowQueryScope }) { + const { scope = 'active' } = options || {} -async function fetchWorkflows( - workspaceId: string, - scope: WorkflowQueryScope = 'active', - signal?: AbortSignal -): Promise { - const response = await fetch(`/api/workflows?workspaceId=${workspaceId}&scope=${scope}`, { - signal, + return useQuery({ + queryKey: workflowKeys.list(workspaceId, scope), + queryFn: workspaceId ? getWorkflowListQueryOptions(workspaceId, scope).queryFn : skipToken, + placeholderData: keepPreviousData, + staleTime: WORKFLOW_LIST_STALE_TIME, }) - - if (!response.ok) { - throw new Error('Failed to fetch workflows') - } - - const { data }: { data: any[] } = await response.json() - return data.map(mapWorkflow) } -export function useWorkflows( - workspaceId?: string, - options?: { syncRegistry?: boolean; scope?: WorkflowQueryScope } -) { - const { syncRegistry = true, scope = 'active' } = options || {} - const beginMetadataLoad = useWorkflowRegistry((state) => state.beginMetadataLoad) - const completeMetadataLoad = useWorkflowRegistry((state) => state.completeMetadataLoad) - const failMetadataLoad = useWorkflowRegistry((state) => state.failMetadataLoad) +/** + * Returns workflows as a `Record` keyed by ID. + * Uses the `select` option so the transformation runs inside React Query + * with structural sharing — components only re-render when the record changes. + */ +export function useWorkflowMap(workspaceId?: string, options?: { scope?: WorkflowQueryScope }) { + const { scope = 'active' } = options || {} - const query = useQuery({ + return useQuery({ queryKey: workflowKeys.list(workspaceId, scope), - queryFn: ({ signal }) => fetchWorkflows(workspaceId as string, scope, signal), - enabled: Boolean(workspaceId), + queryFn: workspaceId ? 
getWorkflowListQueryOptions(workspaceId, scope).queryFn : skipToken, placeholderData: keepPreviousData, - staleTime: 60 * 1000, + staleTime: WORKFLOW_LIST_STALE_TIME, + select: (data) => Object.fromEntries(data.map((w) => [w.id, w])), }) - - useEffect(() => { - if ( - syncRegistry && - scope === 'active' && - workspaceId && - (query.status === 'pending' || query.isPlaceholderData) - ) { - beginMetadataLoad(workspaceId) - } - }, [syncRegistry, scope, workspaceId, query.status, query.isPlaceholderData, beginMetadataLoad]) - - useEffect(() => { - if ( - syncRegistry && - scope === 'active' && - workspaceId && - query.status === 'success' && - query.data && - !query.isPlaceholderData - ) { - completeMetadataLoad(workspaceId, query.data) - } - }, [ - syncRegistry, - scope, - workspaceId, - query.status, - query.data, - query.isPlaceholderData, - completeMetadataLoad, - ]) - - useEffect(() => { - if (syncRegistry && scope === 'active' && workspaceId && query.status === 'error') { - const message = - query.error instanceof Error ? 
query.error.message : 'Failed to fetch workflows' - failMetadataLoad(workspaceId, message) - } - }, [syncRegistry, scope, workspaceId, query.status, query.error, failMetadataLoad]) - - return query } interface CreateWorkflowVariables { @@ -177,128 +107,9 @@ interface CreateWorkflowResult { sortOrder: number } -interface DuplicateWorkflowVariables { - workspaceId: string - sourceId: string - name: string - description?: string - color: string - folderId?: string | null - newId?: string -} - -interface DuplicateWorkflowResult { - id: string - name: string - description?: string - color: string - workspaceId: string - folderId?: string | null - sortOrder: number - blocksCount: number - edgesCount: number - subflowsCount: number -} - -/** - * Creates optimistic mutation handlers for workflow operations - */ -function createWorkflowMutationHandlers( - queryClient: ReturnType, - name: string, - createOptimisticWorkflow: (variables: TVariables, tempId: string) => WorkflowMetadata, - customGenerateTempId?: (variables: TVariables) => string -) { - return createOptimisticMutationHandlers< - CreateWorkflowResult | DuplicateWorkflowResult, - TVariables, - WorkflowMetadata - >(queryClient, { - name, - getQueryKey: (variables) => workflowKeys.list(variables.workspaceId, 'active'), - getSnapshot: () => ({ ...useWorkflowRegistry.getState().workflows }), - generateTempId: customGenerateTempId ?? 
(() => generateTempId('temp-workflow')), - createOptimisticItem: createOptimisticWorkflow, - applyOptimisticUpdate: (tempId, item) => { - useWorkflowRegistry.setState((state) => ({ - workflows: { ...state.workflows, [tempId]: item }, - })) - }, - replaceOptimisticEntry: (tempId, data) => { - useWorkflowRegistry.setState((state) => { - const { [tempId]: _, ...remainingWorkflows } = state.workflows - return { - workflows: { - ...remainingWorkflows, - [data.id]: { - id: data.id, - name: data.name, - lastModified: new Date(), - createdAt: new Date(), - description: data.description, - color: data.color, - workspaceId: data.workspaceId, - folderId: data.folderId, - sortOrder: 'sortOrder' in data ? data.sortOrder : 0, - }, - }, - error: null, - } - }) - - if (tempId !== data.id) { - useFolderStore.setState((state) => { - const selectedWorkflows = new Set(state.selectedWorkflows) - if (selectedWorkflows.has(tempId)) { - selectedWorkflows.delete(tempId) - selectedWorkflows.add(data.id) - } - return { selectedWorkflows } - }) - } - }, - rollback: (snapshot) => { - useWorkflowRegistry.setState({ workflows: snapshot }) - }, - }) -} - export function useCreateWorkflow() { const queryClient = useQueryClient() - const handlers = createWorkflowMutationHandlers( - queryClient, - 'CreateWorkflow', - (variables, tempId) => { - let sortOrder: number - if (variables.sortOrder !== undefined) { - sortOrder = variables.sortOrder - } else { - const currentWorkflows = useWorkflowRegistry.getState().workflows - const currentFolders = useFolderStore.getState().folders - sortOrder = getTopInsertionSortOrder( - currentWorkflows, - currentFolders, - variables.workspaceId, - variables.folderId - ) - } - - return { - id: tempId, - name: variables.name || generateCreativeWorkflowName(), - lastModified: new Date(), - createdAt: new Date(), - description: variables.description || 'New workflow', - color: variables.color || getNextWorkflowColor(), - workspaceId: variables.workspaceId, - folderId: 
variables.folderId || null, - sortOrder, - } - }, - (variables) => variables.id ?? crypto.randomUUID() - ) - return useMutation({ mutationFn: async (variables: CreateWorkflowVariables): Promise => { const { workspaceId, name, description, color, folderId, sortOrder, id, deduplicate } = @@ -343,9 +154,7 @@ export function useCreateWorkflow() { if (!stateResponse.ok) { const text = await stateResponse.text() - logger.error('Failed to persist default Start block:', text) - } else { - logger.info('Successfully persisted default Start block') + logger.error('Failed to persist default workflow state:', text) } return { @@ -358,9 +167,85 @@ export function useCreateWorkflow() { sortOrder: createdWorkflow.sortOrder ?? 0, } }, - ...handlers, + onMutate: async (variables) => { + await queryClient.cancelQueries({ + queryKey: workflowKeys.list(variables.workspaceId, 'active'), + }) + + const snapshot = queryClient.getQueryData( + workflowKeys.list(variables.workspaceId, 'active') + ) + + const tempId = variables.id ?? crypto.randomUUID() + let sortOrder: number + if (variables.sortOrder !== undefined) { + sortOrder = variables.sortOrder + } else { + const currentWorkflows = Object.fromEntries( + getWorkflows(variables.workspaceId).map((w) => [w.id, w]) + ) + sortOrder = getTopInsertionSortOrder( + currentWorkflows, + getFolderMap(variables.workspaceId), + variables.workspaceId, + variables.folderId + ) + } + + const optimistic: WorkflowMetadata = { + id: tempId, + name: variables.name || generateCreativeWorkflowName(), + lastModified: new Date(), + createdAt: new Date(), + description: variables.description || 'New workflow', + color: variables.color || getNextWorkflowColor(), + workspaceId: variables.workspaceId, + folderId: variables.folderId || null, + sortOrder, + } + + queryClient.setQueryData( + workflowKeys.list(variables.workspaceId, 'active'), + (old) => [...(old ?? 
[]), optimistic] + ) + logger.info(`[CreateWorkflow] Added optimistic entry: ${tempId}`) + + return { snapshot, tempId } + }, onSuccess: (data, variables, context) => { - handlers.onSuccess(data, variables, context) + if (!context) return + const { tempId } = context + + queryClient.setQueryData( + workflowKeys.list(variables.workspaceId, 'active'), + (old) => + (old ?? []).map((w) => + w.id === tempId + ? { + id: data.id, + name: data.name, + lastModified: new Date(), + createdAt: new Date(), + description: data.description, + color: data.color, + workspaceId: data.workspaceId, + folderId: data.folderId, + sortOrder: data.sortOrder, + } + : w + ) + ) + + if (tempId !== data.id) { + useFolderStore.setState((state) => { + const selectedWorkflows = new Set(state.selectedWorkflows) + if (selectedWorkflows.has(tempId)) { + selectedWorkflows.delete(tempId) + selectedWorkflows.add(data.id) + } + return { selectedWorkflows } + }) + } const { subBlockValues } = buildDefaultWorkflowArtifacts() useSubBlockStore.setState((state) => ({ @@ -369,40 +254,49 @@ export function useCreateWorkflow() { [data.id]: subBlockValues, }, })) + + logger.info(`[CreateWorkflow] Success, replaced temp entry ${tempId}`) + }, + onError: (_error, variables, context) => { + if (context?.snapshot) { + queryClient.setQueryData( + workflowKeys.list(variables.workspaceId, 'active'), + context.snapshot + ) + logger.info('[CreateWorkflow] Rolled back to previous state') + } + }, + onSettled: (_data, _error, variables) => { + return invalidateWorkflowLists(queryClient, variables.workspaceId, ['active', 'archived']) }, }) } -export function useDuplicateWorkflowMutation() { - const queryClient = useQueryClient() +interface DuplicateWorkflowVariables { + workspaceId: string + sourceId: string + name: string + description?: string + color: string + folderId?: string | null + newId?: string +} - const handlers = createWorkflowMutationHandlers( - queryClient, - 'DuplicateWorkflow', - (variables, tempId) => { - 
const currentWorkflows = useWorkflowRegistry.getState().workflows - const currentFolders = useFolderStore.getState().folders - const targetFolderId = variables.folderId ?? null +interface DuplicateWorkflowResult { + id: string + name: string + description?: string + color: string + workspaceId: string + folderId?: string | null + sortOrder: number + blocksCount: number + edgesCount: number + subflowsCount: number +} - return { - id: tempId, - name: variables.name, - lastModified: new Date(), - createdAt: new Date(), - description: variables.description, - color: variables.color, - workspaceId: variables.workspaceId, - folderId: targetFolderId, - sortOrder: getTopInsertionSortOrder( - currentWorkflows, - currentFolders, - variables.workspaceId, - targetFolderId - ), - } - }, - (variables) => variables.newId ?? crypto.randomUUID() - ) +export function useDuplicateWorkflowMutation() { + const queryClient = useQueryClient() return useMutation({ mutationFn: async (variables: DuplicateWorkflowVariables): Promise => { @@ -449,11 +343,81 @@ export function useDuplicateWorkflowMutation() { subflowsCount: duplicatedWorkflow.subflowsCount || 0, } }, - ...handlers, + onMutate: async (variables) => { + await queryClient.cancelQueries({ + queryKey: workflowKeys.list(variables.workspaceId, 'active'), + }) + + const snapshot = queryClient.getQueryData( + workflowKeys.list(variables.workspaceId, 'active') + ) + const tempId = variables.newId ?? crypto.randomUUID() + + const currentWorkflows = Object.fromEntries( + getWorkflows(variables.workspaceId).map((w) => [w.id, w]) + ) + const targetFolderId = variables.folderId ?? 
null + + const optimistic: WorkflowMetadata = { + id: tempId, + name: variables.name, + lastModified: new Date(), + createdAt: new Date(), + description: variables.description, + color: variables.color, + workspaceId: variables.workspaceId, + folderId: targetFolderId, + sortOrder: getTopInsertionSortOrder( + currentWorkflows, + getFolderMap(variables.workspaceId), + variables.workspaceId, + targetFolderId + ), + } + + queryClient.setQueryData( + workflowKeys.list(variables.workspaceId, 'active'), + (old) => [...(old ?? []), optimistic] + ) + logger.info(`[DuplicateWorkflow] Added optimistic entry: ${tempId}`) + + return { snapshot, tempId } + }, onSuccess: (data, variables, context) => { - handlers.onSuccess(data, variables, context) + if (!context) return + const { tempId } = context + + queryClient.setQueryData( + workflowKeys.list(variables.workspaceId, 'active'), + (old) => + (old ?? []).map((w) => + w.id === tempId + ? { + id: data.id, + name: data.name, + lastModified: new Date(), + createdAt: new Date(), + description: data.description, + color: data.color, + workspaceId: data.workspaceId, + folderId: data.folderId, + sortOrder: data.sortOrder, + } + : w + ) + ) + + if (tempId !== data.id) { + useFolderStore.setState((state) => { + const selectedWorkflows = new Set(state.selectedWorkflows) + if (selectedWorkflows.has(tempId)) { + selectedWorkflows.delete(tempId) + selectedWorkflows.add(data.id) + } + return { selectedWorkflows } + }) + } - // Copy subblock values from source if it's the active workflow const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId if (variables.sourceId === activeWorkflowId) { const sourceSubblockValues = @@ -465,48 +429,143 @@ export function useDuplicateWorkflowMutation() { }, })) } + + logger.info(`[DuplicateWorkflow] Success, replaced temp entry ${tempId}`) + }, + onError: (_error, variables, context) => { + if (context?.snapshot) { + queryClient.setQueryData( + workflowKeys.list(variables.workspaceId, 
'active'), + context.snapshot + ) + logger.info('[DuplicateWorkflow] Rolled back to previous state') + } + }, + onSettled: (_data, _error, variables) => { + return invalidateWorkflowLists(queryClient, variables.workspaceId) }, }) } -interface DeploymentVersionStateResponse { - deployedState: WorkflowState +interface UpdateWorkflowVariables { + workspaceId: string + workflowId: string + metadata: Partial } -/** - * Fetches the deployed state for a specific deployment version. - * Exported for reuse in other query hooks. - */ -export async function fetchDeploymentVersionState( - workflowId: string, - version: number, - signal?: AbortSignal -): Promise { - const response = await fetch(`/api/workflows/${workflowId}/deployments/${version}`, { signal }) +export function useUpdateWorkflow() { + const queryClient = useQueryClient() - if (!response.ok) { - throw new Error(`Failed to fetch deployment version: ${response.statusText}`) - } + return useMutation({ + mutationFn: async (variables: UpdateWorkflowVariables) => { + const response = await fetch(`/api/workflows/${variables.workflowId}`, { + method: 'PUT', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(variables.metadata), + }) - const data: DeploymentVersionStateResponse = await response.json() - if (!data.deployedState) { - throw new Error('No deployed state returned') - } + if (!response.ok) { + const error = await response.json() + throw new Error(error.error || 'Failed to update workflow') + } + + const { workflow: updatedWorkflow } = await response.json() + return mapWorkflow(updatedWorkflow) + }, + onMutate: async (variables) => { + await queryClient.cancelQueries({ + queryKey: workflowKeys.list(variables.workspaceId, 'active'), + }) - return data.deployedState + const snapshot = queryClient.getQueryData( + workflowKeys.list(variables.workspaceId, 'active') + ) + + queryClient.setQueryData( + workflowKeys.list(variables.workspaceId, 'active'), + (old) => + (old ?? 
[]).map((w) => + w.id === variables.workflowId + ? { ...w, ...variables.metadata, lastModified: new Date() } + : w + ) + ) + + return { snapshot } + }, + onError: (_error, variables, context) => { + if (context?.snapshot) { + queryClient.setQueryData( + workflowKeys.list(variables.workspaceId, 'active'), + context.snapshot + ) + } + }, + onSettled: (_data, _error, variables) => { + return invalidateWorkflowLists(queryClient, variables.workspaceId) + }, + }) +} + +interface DeleteWorkflowVariables { + workspaceId: string + workflowId: string +} + +export function useDeleteWorkflowMutation() { + const queryClient = useQueryClient() + + return useMutation({ + mutationFn: async (variables: DeleteWorkflowVariables) => { + const response = await fetch(`/api/workflows/${variables.workflowId}`, { + method: 'DELETE', + }) + + if (!response.ok) { + const error = await response.json().catch(() => ({ error: 'Unknown error' })) + throw new Error(error.error || 'Failed to delete workflow') + } + + logger.info(`Successfully deleted workflow ${variables.workflowId} from database`) + }, + onMutate: async (variables) => { + await queryClient.cancelQueries({ + queryKey: workflowKeys.list(variables.workspaceId, 'active'), + }) + + const snapshot = queryClient.getQueryData( + workflowKeys.list(variables.workspaceId, 'active') + ) + + queryClient.setQueryData( + workflowKeys.list(variables.workspaceId, 'active'), + (old) => (old ?? []).filter((w) => w.id !== variables.workflowId) + ) + + return { snapshot } + }, + onError: (_error, variables, context) => { + if (context?.snapshot) { + queryClient.setQueryData( + workflowKeys.list(variables.workspaceId, 'active'), + context.snapshot + ) + } + }, + onSettled: (_data, _error, variables) => { + return invalidateWorkflowLists(queryClient, variables.workspaceId, ['active', 'archived']) + }, + }) } -/** - * Hook for fetching the workflow state of a specific deployment version. - * Used in the deploy modal to preview historical versions. 
- */ export function useDeploymentVersionState(workflowId: string | null, version: number | null) { return useQuery({ queryKey: workflowKeys.deploymentVersion(workflowId ?? undefined, version ?? undefined), - queryFn: ({ signal }) => - fetchDeploymentVersionState(workflowId as string, version as number, signal), - enabled: Boolean(workflowId) && version !== null, - staleTime: 5 * 60 * 1000, // 5 minutes - deployment versions don't change + queryFn: + workflowId && version !== null + ? ({ signal }) => fetchDeploymentVersionState(workflowId, version, signal) + : skipToken, + staleTime: 5 * 60 * 1000, }) } @@ -515,9 +574,6 @@ interface RevertToVersionVariables { version: number } -/** - * Mutation hook for reverting (loading) a deployment version into the current workflow. - */ export function useRevertToVersion() { const queryClient = useQueryClient() @@ -531,7 +587,7 @@ export function useRevertToVersion() { throw new Error('Failed to load deployment') } }, - onSuccess: (_data, variables) => { + onSettled: (_data, _error, variables) => { queryClient.invalidateQueries({ queryKey: workflowKeys.state(variables.workflowId), }) @@ -574,41 +630,45 @@ export function useReorderWorkflows() { } }, onMutate: async (variables) => { - await queryClient.cancelQueries({ queryKey: workflowKeys.lists() }) - - const snapshot = { ...useWorkflowRegistry.getState().workflows } + await queryClient.cancelQueries({ + queryKey: workflowKeys.list(variables.workspaceId, 'active'), + }) - useWorkflowRegistry.setState((state) => { - const updated = { ...state.workflows } - for (const update of variables.updates) { - if (updated[update.id]) { - updated[update.id] = { - ...updated[update.id], + const snapshot = queryClient.getQueryData( + workflowKeys.list(variables.workspaceId, 'active') + ) + + const updateMap = new Map(variables.updates.map((u) => [u.id, u])) + queryClient.setQueryData( + workflowKeys.list(variables.workspaceId, 'active'), + (old) => + (old ?? 
[]).map((w) => { + const update = updateMap.get(w.id) + if (!update) return w + return { + ...w, sortOrder: update.sortOrder, - folderId: - update.folderId !== undefined ? update.folderId : updated[update.id].folderId, + folderId: update.folderId !== undefined ? update.folderId : w.folderId, } - } - } - return { workflows: updated } - }) + }) + ) return { snapshot } }, - onError: (_error, _variables, context) => { + onError: (_error, variables, context) => { if (context?.snapshot) { - useWorkflowRegistry.setState({ workflows: context.snapshot }) + queryClient.setQueryData( + workflowKeys.list(variables.workspaceId, 'active'), + context.snapshot + ) } }, onSettled: (_data, _error, variables) => { - queryClient.invalidateQueries({ queryKey: workflowKeys.lists() }) + return invalidateWorkflowLists(queryClient, variables.workspaceId) }, }) } -/** - * Import workflow mutation (superuser debug) - */ interface ImportWorkflowParams { workflowId: string targetWorkspaceId: string @@ -641,8 +701,8 @@ export function useImportWorkflow() { return data }, - onSuccess: (_data, variables) => { - queryClient.invalidateQueries({ queryKey: workflowKeys.lists() }) + onSettled: (_data, _error, variables) => { + return invalidateWorkflowLists(queryClient, variables.targetWorkspaceId) }, }) } @@ -651,7 +711,7 @@ export function useRestoreWorkflow() { const queryClient = useQueryClient() return useMutation({ - mutationFn: async (workflowId: string) => { + mutationFn: async ({ workflowId }: { workflowId: string; workspaceId: string }) => { const res = await fetch(`/api/workflows/${workflowId}/restore`, { method: 'POST' }) if (!res.ok) { const data = await res.json().catch(() => ({})) @@ -659,8 +719,8 @@ export function useRestoreWorkflow() { } return res.json() }, - onSettled: () => { - queryClient.invalidateQueries({ queryKey: workflowKeys.lists() }) + onSettled: (_data, _error, variables) => { + return invalidateWorkflowLists(queryClient, variables.workspaceId, ['active', 'archived']) }, 
}) } diff --git a/apps/sim/hooks/queries/workspace-files.ts b/apps/sim/hooks/queries/workspace-files.ts index 074ed3b8c8d..befea6e7f65 100644 --- a/apps/sim/hooks/queries/workspace-files.ts +++ b/apps/sim/hooks/queries/workspace-files.ts @@ -33,6 +33,23 @@ export interface StorageInfo { plan?: string } +/** + * Hook to fetch a single workspace file record by ID. + * Shares the `list(workspaceId, 'active')` query key with {@link useWorkspaceFiles} so no extra + * network request is made when the list is already cached (warm path). + * On a cold path (e.g. direct navigation to a file URL), this fetches the full active file list + * for the workspace and selects the matching record via `select`. + */ +export function useWorkspaceFileRecord(workspaceId: string, fileId: string) { + return useQuery({ + queryKey: workspaceFilesKeys.list(workspaceId, 'active'), + queryFn: ({ signal }) => fetchWorkspaceFiles(workspaceId, 'active', signal), + enabled: !!workspaceId && !!fileId, + staleTime: 30 * 1000, + select: (files) => files.find((f) => f.id === fileId) ?? null, + }) +} + /** * Fetch workspace files from API */ diff --git a/apps/sim/hooks/selectors/query-keys.ts b/apps/sim/hooks/selectors/query-keys.ts new file mode 100644 index 00000000000..c5fa1afe97a --- /dev/null +++ b/apps/sim/hooks/selectors/query-keys.ts @@ -0,0 +1,7 @@ +export const selectorKeys = { + all: ['selectors'] as const, + simWorkflowsPrefix: (workspaceId: string) => + [...selectorKeys.all, 'sim.workflows', workspaceId] as const, + simWorkflows: (workspaceId: string, excludeWorkflowId?: string) => + [...selectorKeys.simWorkflowsPrefix(workspaceId), excludeWorkflowId ?? 
'none'] as const, +} diff --git a/apps/sim/hooks/selectors/registry.test.ts b/apps/sim/hooks/selectors/registry.test.ts new file mode 100644 index 00000000000..fe3d596fac0 --- /dev/null +++ b/apps/sim/hooks/selectors/registry.test.ts @@ -0,0 +1,84 @@ +/** + * @vitest-environment node + */ +import { beforeEach, describe, expect, it, vi } from 'vitest' + +const { mockEnsureQueryData, mockGetWorkflows } = vi.hoisted(() => ({ + mockEnsureQueryData: vi.fn().mockResolvedValue(undefined), + mockGetWorkflows: vi.fn(), +})) + +vi.mock('@/app/_shell/providers/get-query-client', () => ({ + getQueryClient: vi.fn(() => ({ + ensureQueryData: mockEnsureQueryData, + })), +})) + +vi.mock('@/hooks/queries/utils/workflow-cache', () => ({ + getWorkflows: mockGetWorkflows, + getWorkflowById: vi.fn((workspaceId: string, workflowId: string) => + mockGetWorkflows(workspaceId).find((workflow: { id: string }) => workflow.id === workflowId) + ), +})) + +vi.mock('@/hooks/queries/utils/workflow-list-query', () => ({ + getWorkflowListQueryOptions: vi.fn((workspaceId: string) => ({ + queryKey: ['workflows', 'list', workspaceId, 'active'], + })), +})) + +import { getSelectorDefinition } from '@/hooks/selectors/registry' + +describe('sim.workflows selector', () => { + beforeEach(() => { + vi.clearAllMocks() + mockGetWorkflows.mockReturnValue([ + { id: 'wf-1', name: 'Alpha Workflow' }, + { id: 'wf-2', name: 'Bravo Workflow' }, + ]) + }) + + it('requires an explicit workspaceId in selector context', () => { + const definition = getSelectorDefinition('sim.workflows') + + expect(definition.enabled?.({ key: 'sim.workflows', context: {} })).toBe(false) + expect(definition.staleTime).toBe(60_000) + expect( + definition.getQueryKey({ + key: 'sim.workflows', + context: { workspaceId: 'ws-1', excludeWorkflowId: 'wf-2' }, + }) + ).toEqual(['selectors', 'sim.workflows', 'ws-1', 'wf-2']) + }) + + it('reads workflow options from the scoped workflow cache', async () => { + const definition = 
getSelectorDefinition('sim.workflows') + + const options = await definition.fetchList({ + key: 'sim.workflows', + context: { workspaceId: 'ws-1', excludeWorkflowId: 'wf-2' }, + }) + + expect(mockEnsureQueryData).toHaveBeenCalledWith({ + queryKey: ['workflows', 'list', 'ws-1', 'active'], + }) + expect(mockGetWorkflows).toHaveBeenCalledWith('ws-1') + expect(options).toEqual([{ id: 'wf-1', label: 'Alpha Workflow' }]) + }) + + it('resolves workflow labels by id using the same workspace scope', async () => { + const definition = getSelectorDefinition('sim.workflows') + + const option = await definition.fetchById?.({ + key: 'sim.workflows', + context: { workspaceId: 'ws-1' }, + detailId: 'wf-2', + }) + + expect(mockEnsureQueryData).toHaveBeenCalledWith({ + queryKey: ['workflows', 'list', 'ws-1', 'active'], + }) + expect(mockGetWorkflows).toHaveBeenCalledWith('ws-1') + expect(option).toEqual({ id: 'wf-2', label: 'Bravo Workflow' }) + }) +}) diff --git a/apps/sim/hooks/selectors/registry.ts b/apps/sim/hooks/selectors/registry.ts index fd050f97a6a..be48437b7d7 100644 --- a/apps/sim/hooks/selectors/registry.ts +++ b/apps/sim/hooks/selectors/registry.ts @@ -1,4 +1,8 @@ +import { getQueryClient } from '@/app/_shell/providers/get-query-client' +import { getWorkflowById, getWorkflows } from '@/hooks/queries/utils/workflow-cache' +import { getWorkflowListQueryOptions } from '@/hooks/queries/utils/workflow-list-query' import { fetchJson, fetchOAuthToken } from '@/hooks/selectors/helpers' +import { selectorKeys } from '@/hooks/selectors/query-keys' import type { SelectorContext, SelectorDefinition, @@ -6,7 +10,6 @@ import type { SelectorOption, SelectorQueryArgs, } from '@/hooks/selectors/types' -import { useWorkflowRegistry } from '@/stores/workflows/registry/store' const SELECTOR_STALE = 60 * 1000 @@ -1685,27 +1688,28 @@ const registry: Record = { }, 'sim.workflows': { key: 'sim.workflows', - staleTime: 0, // Always fetch fresh from store - getQueryKey: ({ context }: 
SelectorQueryArgs) => [ - 'selectors', - 'sim.workflows', - context.excludeWorkflowId ?? 'none', - ], - enabled: () => true, + staleTime: SELECTOR_STALE, + getQueryKey: ({ context }: SelectorQueryArgs) => + context.workspaceId + ? selectorKeys.simWorkflows(context.workspaceId, context.excludeWorkflowId) + : [...selectorKeys.all, 'sim.workflows', 'none', context.excludeWorkflowId ?? 'none'], + enabled: ({ context }) => Boolean(context.workspaceId), fetchList: async ({ context }: SelectorQueryArgs): Promise => { - const { workflows } = useWorkflowRegistry.getState() - return Object.entries(workflows) - .filter(([id]) => id !== context.excludeWorkflowId) - .map(([id, workflow]) => ({ - id, - label: workflow.name || `Workflow ${id.slice(0, 8)}`, + if (!context.workspaceId) return [] + await getQueryClient().ensureQueryData(getWorkflowListQueryOptions(context.workspaceId)) + const workflows = getWorkflows(context.workspaceId) + return workflows + .filter((w) => w.id !== context.excludeWorkflowId) + .map((w) => ({ + id: w.id, + label: w.name || `Workflow ${w.id.slice(0, 8)}`, })) .sort((a, b) => a.label.localeCompare(b.label)) }, - fetchById: async ({ detailId }: SelectorQueryArgs): Promise => { - if (!detailId) return null - const { workflows } = useWorkflowRegistry.getState() - const workflow = workflows[detailId] + fetchById: async ({ context, detailId }: SelectorQueryArgs): Promise => { + if (!detailId || !context.workspaceId) return null + await getQueryClient().ensureQueryData(getWorkflowListQueryOptions(context.workspaceId)) + const workflow = getWorkflowById(context.workspaceId, detailId) if (!workflow) return null return { id: detailId, diff --git a/apps/sim/hooks/selectors/types.ts b/apps/sim/hooks/selectors/types.ts index 87e1572ef57..2324106a802 100644 --- a/apps/sim/hooks/selectors/types.ts +++ b/apps/sim/hooks/selectors/types.ts @@ -84,6 +84,7 @@ export interface SelectorQueryArgs { context: SelectorContext search?: string detailId?: string + signal?: 
AbortSignal } export interface SelectorDefinition { diff --git a/apps/sim/hooks/selectors/use-selector-query.ts b/apps/sim/hooks/selectors/use-selector-query.ts index 6486e769773..0323c7f5e7d 100644 --- a/apps/sim/hooks/selectors/use-selector-query.ts +++ b/apps/sim/hooks/selectors/use-selector-query.ts @@ -21,7 +21,7 @@ export function useSelectorOptions(key: SelectorKey, args: SelectorHookArgs) { const isEnabled = args.enabled ?? (definition.enabled ? definition.enabled(queryArgs) : true) return useQuery({ queryKey: definition.getQueryKey(queryArgs), - queryFn: () => definition.fetchList(queryArgs), + queryFn: ({ signal }) => definition.fetchList({ ...queryArgs, signal }), enabled: isEnabled, staleTime: definition.staleTime ?? 30_000, }) @@ -60,7 +60,7 @@ export function useSelectorOptionDetail( const query = useQuery({ queryKey: [...definition.getQueryKey(queryArgs), 'detail', resolvedDetailId ?? 'none'], - queryFn: () => definition.fetchById!(queryArgs), + queryFn: ({ signal }) => definition.fetchById!({ ...queryArgs, signal }), enabled, staleTime: definition.staleTime ?? 
300_000, }) diff --git a/apps/sim/hooks/use-collaborative-workflow.ts b/apps/sim/hooks/use-collaborative-workflow.ts index b8b85b3cbd1..4093bed8b20 100644 --- a/apps/sim/hooks/use-collaborative-workflow.ts +++ b/apps/sim/hooks/use-collaborative-workflow.ts @@ -1281,7 +1281,7 @@ export function useCollaborativeWorkflow() { // Immediate tag selection (uses queue but processes immediately, no debouncing) const collaborativeSetTagSelection = useCallback( - (blockId: string, subblockId: string, value: any) => { + (blockId: string, subblockId: string, value: string) => { if (isApplyingRemoteChange.current) return if (isBaselineDiffView) { diff --git a/apps/sim/hooks/use-permission-config.ts b/apps/sim/hooks/use-permission-config.ts index f103aa31436..f52b8aae18c 100644 --- a/apps/sim/hooks/use-permission-config.ts +++ b/apps/sim/hooks/use-permission-config.ts @@ -30,8 +30,8 @@ interface AllowedIntegrationsResponse { function useAllowedIntegrationsFromEnv() { return useQuery({ queryKey: ['allowedIntegrations', 'env'], - queryFn: async () => { - const response = await fetch('/api/settings/allowed-integrations') + queryFn: async ({ signal }) => { + const response = await fetch('/api/settings/allowed-integrations', { signal }) if (!response.ok) return { allowedIntegrations: null } return response.json() }, diff --git a/apps/sim/lib/a2a/push-notifications.ts b/apps/sim/lib/a2a/push-notifications.ts index e2a7b4ef67a..4413b569fbf 100644 --- a/apps/sim/lib/a2a/push-notifications.ts +++ b/apps/sim/lib/a2a/push-notifications.ts @@ -114,7 +114,12 @@ export async function notifyTaskStateChange(taskId: string, state: TaskState): P const { a2aPushNotificationTask } = await import( '@/background/a2a-push-notification-delivery' ) - await a2aPushNotificationTask.trigger({ taskId, state }) + await a2aPushNotificationTask.trigger( + { taskId, state }, + { + tags: [`taskId:${taskId}`], + } + ) logger.info('Push notification queued to trigger.dev', { taskId, state }) } catch (error) { 
logger.warn('Failed to queue push notification, falling back to inline delivery', { diff --git a/apps/sim/lib/api-key/byok.ts b/apps/sim/lib/api-key/byok.ts index 1c45b47168e..901d86af5fe 100644 --- a/apps/sim/lib/api-key/byok.ts +++ b/apps/sim/lib/api-key/byok.ts @@ -3,6 +3,7 @@ import { workspaceBYOKKeys } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { and, eq } from 'drizzle-orm' import { getRotatingApiKey } from '@/lib/core/config/api-keys' +import { env } from '@/lib/core/config/env' import { isHosted } from '@/lib/core/config/feature-flags' import { decryptSecret } from '@/lib/core/security/encryption' import { getWorkspaceById } from '@/lib/workspaces/permissions/utils' @@ -69,7 +70,27 @@ export async function getApiKeyWithBYOK( const isVllmModel = provider === 'vllm' || useProvidersStore.getState().providers.vllm.models.includes(model) if (isVllmModel) { - return { apiKey: userProvidedKey || 'empty', isBYOK: false } + return { apiKey: userProvidedKey || env.VLLM_API_KEY || 'empty', isBYOK: false } + } + + const isFireworksModel = + provider === 'fireworks' || + useProvidersStore.getState().providers.fireworks.models.includes(model) + if (isFireworksModel) { + if (workspaceId) { + const byokResult = await getBYOKKey(workspaceId, 'fireworks') + if (byokResult) { + logger.info('Using BYOK key for Fireworks', { model, workspaceId }) + return byokResult + } + } + if (userProvidedKey) { + return { apiKey: userProvidedKey, isBYOK: false } + } + if (env.FIREWORKS_API_KEY) { + return { apiKey: env.FIREWORKS_API_KEY, isBYOK: false } + } + throw new Error(`API key is required for Fireworks ${model}`) } const isBedrockModel = provider === 'bedrock' || model.startsWith('bedrock/') diff --git a/apps/sim/lib/copilot/tools/client/tool-display-registry.ts b/apps/sim/lib/copilot/tools/client/tool-display-registry.ts index 617626700d7..9a9cb88ca8a 100644 --- a/apps/sim/lib/copilot/tools/client/tool-display-registry.ts +++ 
b/apps/sim/lib/copilot/tools/client/tool-display-registry.ts @@ -40,7 +40,8 @@ import { XCircle, Zap, } from 'lucide-react' -import { getCustomTool } from '@/hooks/queries/custom-tools' +import { getCustomTool } from '@/hooks/queries/utils/custom-tool-cache' +import { getWorkflowById } from '@/hooks/queries/utils/workflow-cache' import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import { useWorkflowStore } from '@/stores/workflows/workflow/store' @@ -137,6 +138,15 @@ function formatDuration(seconds: number): string { return `${hours}h` } +function getScopedWorkspaceId(params: Record): string | undefined { + const paramWorkspaceId = params?.workspaceId + if (typeof paramWorkspaceId === 'string' && paramWorkspaceId.length > 0) { + return paramWorkspaceId + } + + return useWorkflowRegistry.getState().hydration.workspaceId ?? undefined +} + function toUiConfig(metadata?: ToolMetadata): ToolUIConfig | undefined { const legacy = metadata?.uiConfig const subagent = legacy?.subagent @@ -1036,13 +1046,14 @@ const META_manage_custom_tool: ToolMetadata = { }, getDynamicText: (params, state) => { const operation = params?.operation as 'add' | 'edit' | 'delete' | 'list' | undefined + const workspaceId = getScopedWorkspaceId(params) if (!operation) return undefined let toolName = params?.schema?.function?.name - if (!toolName && params?.toolId) { + if (!toolName && params?.toolId && workspaceId) { try { - const tool = getCustomTool(params.toolId) + const tool = getCustomTool(params.toolId, workspaceId) toolName = tool?.schema?.function?.name } catch { // Ignore errors accessing cache @@ -1629,8 +1640,9 @@ const META_run_workflow: ToolMetadata = { }, getDynamicText: (params, state) => { const workflowId = params?.workflowId || useWorkflowRegistry.getState().activeWorkflowId - if (workflowId) { - const workflowName = useWorkflowRegistry.getState().workflows[workflowId]?.name + const workspaceId = getScopedWorkspaceId(params) + if (workflowId && workspaceId) { 
+ const workflowName = getWorkflowById(workspaceId, workflowId)?.name if (workflowName) { switch (state) { case ClientToolCallState.success: diff --git a/apps/sim/lib/copilot/tools/server/blocks/get-blocks-metadata-tool.ts b/apps/sim/lib/copilot/tools/server/blocks/get-blocks-metadata-tool.ts index 4d041ee6fd7..b15785f9fd4 100644 --- a/apps/sim/lib/copilot/tools/server/blocks/get-blocks-metadata-tool.ts +++ b/apps/sim/lib/copilot/tools/server/blocks/get-blocks-metadata-tool.ts @@ -696,14 +696,19 @@ function resolveAuthType( /** * Gets all available models from PROVIDER_DEFINITIONS as static options. * This provides fallback data when store state is not available server-side. - * Excludes dynamic providers (ollama, vllm, openrouter) which require runtime fetching. + * Excludes dynamic providers (ollama, vllm, openrouter, fireworks) which require runtime fetching. */ function getStaticModelOptions(): { id: string; label?: string }[] { const models: { id: string; label?: string }[] = [] for (const provider of Object.values(PROVIDER_DEFINITIONS)) { // Skip providers with dynamic/fetched models - if (provider.id === 'ollama' || provider.id === 'vllm' || provider.id === 'openrouter') { + if ( + provider.id === 'ollama' || + provider.id === 'vllm' || + provider.id === 'openrouter' || + provider.id === 'fireworks' + ) { continue } if (provider?.models) { @@ -737,6 +742,7 @@ function callOptionsWithFallback( ollama: { models: [] }, vllm: { models: [] }, openrouter: { models: [] }, + fireworks: { models: [] }, }, } diff --git a/apps/sim/lib/copilot/vfs/serializers.ts b/apps/sim/lib/copilot/vfs/serializers.ts index 219ea041969..e1d282d0d55 100644 --- a/apps/sim/lib/copilot/vfs/serializers.ts +++ b/apps/sim/lib/copilot/vfs/serializers.ts @@ -324,7 +324,7 @@ function getStaticModelOptionsForVFS(): Array<{ hosted: boolean }> { const hostedProviders = new Set(['openai', 'anthropic', 'google']) - const dynamicProviders = new Set(['ollama', 'vllm', 'openrouter']) + const 
dynamicProviders = new Set(['ollama', 'vllm', 'openrouter', 'fireworks']) const models: Array<{ id: string; provider: string; hosted: boolean }> = [] diff --git a/apps/sim/lib/core/async-jobs/backends/trigger-dev.ts b/apps/sim/lib/core/async-jobs/backends/trigger-dev.ts index 436024e24ca..2a682ee7d1f 100644 --- a/apps/sim/lib/core/async-jobs/backends/trigger-dev.ts +++ b/apps/sim/lib/core/async-jobs/backends/trigger-dev.ts @@ -66,9 +66,10 @@ export class TriggerDevJobQueue implements JobQueueBackend { ? { ...payload, ...options.metadata } : payload - const handle = await tasks.trigger(taskId, enrichedPayload) + const tags = buildTags(options) + const handle = await tasks.trigger(taskId, enrichedPayload, tags.length > 0 ? { tags } : {}) - logger.debug('Enqueued job via trigger.dev', { jobId: handle.id, type, taskId }) + logger.debug('Enqueued job via trigger.dev', { jobId: handle.id, type, taskId, tags }) return handle.id } @@ -121,3 +122,33 @@ export class TriggerDevJobQueue implements JobQueueBackend { async markJobFailed(_jobId: string, _error: string): Promise {} } + +/** + * Derives trigger.dev tags from job type, metadata, and explicit tags. + * Tags follow the `namespace:value` convention for consistent filtering. + * Max 10 tags per run, each max 128 chars. 
+ */ +function buildTags(options?: EnqueueOptions): string[] { + const tags: string[] = [] + const meta = options?.metadata + + if (meta?.workspaceId) tags.push(`workspaceId:${meta.workspaceId}`) + if (meta?.workflowId) tags.push(`workflowId:${meta.workflowId}`) + if (meta?.userId) tags.push(`userId:${meta.userId}`) + + if (meta?.correlation) { + const c = meta.correlation + tags.push(`source:${c.source}`) + if (c.webhookId) tags.push(`webhookId:${c.webhookId}`) + if (c.scheduleId) tags.push(`scheduleId:${c.scheduleId}`) + if (c.provider) tags.push(`provider:${c.provider}`) + } + + if (options?.tags) { + for (const tag of options.tags) { + if (!tags.includes(tag)) tags.push(tag) + } + } + + return tags.slice(0, 10) +} diff --git a/apps/sim/lib/core/async-jobs/types.ts b/apps/sim/lib/core/async-jobs/types.ts index c4bdc27c5ef..6888ab10723 100644 --- a/apps/sim/lib/core/async-jobs/types.ts +++ b/apps/sim/lib/core/async-jobs/types.ts @@ -54,6 +54,7 @@ export interface Job { export interface JobMetadata { workflowId?: string + workspaceId?: string userId?: string correlation?: AsyncExecutionCorrelation [key: string]: unknown @@ -66,6 +67,7 @@ export interface EnqueueOptions { priority?: number name?: string delayMs?: number + tags?: string[] } /** diff --git a/apps/sim/lib/core/config/env.ts b/apps/sim/lib/core/config/env.ts index 5a1dc5743d1..d58892e6887 100644 --- a/apps/sim/lib/core/config/env.ts +++ b/apps/sim/lib/core/config/env.ts @@ -105,6 +105,7 @@ export const env = createEnv({ OLLAMA_URL: z.string().url().optional(), // Ollama local LLM server URL VLLM_BASE_URL: z.string().url().optional(), // vLLM self-hosted base URL (OpenAI-compatible) VLLM_API_KEY: z.string().optional(), // Optional bearer token for vLLM + FIREWORKS_API_KEY: z.string().optional(), // Optional Fireworks AI API key for model listing ELEVENLABS_API_KEY: z.string().min(1).optional(), // ElevenLabs API key for text-to-speech in deployed chat SERPER_API_KEY: z.string().min(1).optional(), // 
Serper API key for online search EXA_API_KEY: z.string().min(1).optional(), // Exa AI API key for enhanced online search diff --git a/apps/sim/lib/core/config/feature-flags.ts b/apps/sim/lib/core/config/feature-flags.ts index d5fa530e52b..012d1d5e026 100644 --- a/apps/sim/lib/core/config/feature-flags.ts +++ b/apps/sim/lib/core/config/feature-flags.ts @@ -19,11 +19,17 @@ export const isDev = env.NODE_ENV === 'development' export const isTest = env.NODE_ENV === 'test' /** - * Is this the hosted version of the application - */ -export const isHosted = - getEnv('NEXT_PUBLIC_APP_URL') === 'https://www.sim.ai' || - getEnv('NEXT_PUBLIC_APP_URL') === 'https://www.staging.sim.ai' + * Is this the hosted version of the application. + * True for sim.ai and any subdomain of sim.ai (e.g. staging.sim.ai, dev.sim.ai). + */ +const appUrl = getEnv('NEXT_PUBLIC_APP_URL') +let appHostname = '' +try { + appHostname = appUrl ? new URL(appUrl).hostname : '' +} catch { + // invalid URL — isHosted stays false +} +export const isHosted = appHostname === 'sim.ai' || appHostname.endsWith('.sim.ai') /** * Is billing enforcement enabled diff --git a/apps/sim/lib/core/utils/optimistic-update.ts b/apps/sim/lib/core/utils/optimistic-update.ts deleted file mode 100644 index 4759255e4db..00000000000 --- a/apps/sim/lib/core/utils/optimistic-update.ts +++ /dev/null @@ -1,103 +0,0 @@ -import { createLogger } from '@sim/logger' - -const logger = createLogger('OptimisticUpdate') - -/** - * Options for performing an optimistic update with automatic rollback on error - */ -export interface OptimisticUpdateOptions { - /** - * Function that returns the current state value (for rollback purposes) - */ - getCurrentState: () => T - /** - * Function that performs the optimistic update to the UI state - */ - optimisticUpdate: () => void - /** - * Async function that performs the actual API call - */ - apiCall: () => Promise - /** - * Function that rolls back the state to the original value - * @param 
originalValue - The value returned by getCurrentState before the update - */ - rollback: (originalValue: T) => void - /** - * Optional error message to log if the operation fails - */ - errorMessage?: string - /** - * Optional callback to execute on error (e.g., show toast notification) - */ - onError?: (error: Error, originalValue: T) => void - /** - * Optional callback that always runs regardless of success or error (e.g., to clear loading states) - */ - onComplete?: () => void -} - -/** - * Performs an optimistic update with automatic rollback on error. - * This utility standardizes the pattern of: - * 1. Save current state - * 2. Update UI optimistically - * 3. Make API call - * 4. Rollback on error - * - * @example - * ```typescript - * await withOptimisticUpdate({ - * getCurrentState: () => get().folders[id], - * optimisticUpdate: () => set(state => ({ - * folders: { ...state.folders, [id]: { ...folder, name: newName } } - * })), - * apiCall: async () => { - * await fetch(`/api/folders/${id}`, { - * method: 'PUT', - * body: JSON.stringify({ name: newName }) - * }) - * }, - * rollback: (originalFolder) => set(state => ({ - * folders: { ...state.folders, [id]: originalFolder } - * })), - * errorMessage: 'Failed to rename folder', - * onError: (error) => toast.error('Could not rename folder') - * }) - * ``` - */ -export async function withOptimisticUpdate(options: OptimisticUpdateOptions): Promise { - const { - getCurrentState, - optimisticUpdate, - apiCall, - rollback, - errorMessage, - onError, - onComplete, - } = options - - const originalValue = getCurrentState() - - optimisticUpdate() - - try { - await apiCall() - } catch (error) { - rollback(originalValue) - - if (errorMessage) { - logger.error(errorMessage, { error }) - } - - if (onError && error instanceof Error) { - onError(error, originalValue) - } - - throw error - } finally { - if (onComplete) { - onComplete() - } - } -} diff --git a/apps/sim/lib/folders/tree.ts b/apps/sim/lib/folders/tree.ts new 
file mode 100644 index 00000000000..41196a7e35b --- /dev/null +++ b/apps/sim/lib/folders/tree.ts @@ -0,0 +1,59 @@ +import type { FolderTreeNode, WorkflowFolder } from '@/stores/folders/types' + +export function buildFolderMap(folders: WorkflowFolder[]): Record { + return Object.fromEntries(folders.map((folder) => [folder.id, folder])) +} + +export function buildFolderTree( + folders: Record, + workspaceId: string +): FolderTreeNode[] { + const workspaceFolders = Object.values(folders).filter( + (folder) => folder.workspaceId === workspaceId + ) + + const buildTree = (parentId: string | null, level = 0): FolderTreeNode[] => { + return workspaceFolders + .filter((folder) => folder.parentId === parentId) + .sort((a, b) => a.sortOrder - b.sortOrder || a.name.localeCompare(b.name)) + .map((folder) => ({ + ...folder, + children: buildTree(folder.id, level + 1), + level, + })) + } + + return buildTree(null) +} + +export function getFolderById( + folders: Record, + folderId: string +): WorkflowFolder | undefined { + return folders[folderId] +} + +export function getChildFolders( + folders: Record, + parentId: string | null +): WorkflowFolder[] { + return Object.values(folders) + .filter((folder) => folder.parentId === parentId) + .sort((a, b) => a.sortOrder - b.sortOrder || a.name.localeCompare(b.name)) +} + +export function getFolderPath( + folders: Record, + folderId: string +): WorkflowFolder[] { + const path: WorkflowFolder[] = [] + let currentId: string | null = folderId + + while (currentId && folders[currentId]) { + const folder: WorkflowFolder = folders[currentId] + path.unshift(folder) + currentId = folder.parentId + } + + return path +} diff --git a/apps/sim/lib/knowledge/connectors/sync-engine.ts b/apps/sim/lib/knowledge/connectors/sync-engine.ts index e62e07179fc..e8fede9f0ea 100644 --- a/apps/sim/lib/knowledge/connectors/sync-engine.ts +++ b/apps/sim/lib/knowledge/connectors/sync-engine.ts @@ -152,13 +152,30 @@ export async function dispatchSync( const 
requestId = options?.requestId ?? crypto.randomUUID() if (isTriggerAvailable()) { + const connectorRows = await db + .select({ + knowledgeBaseId: knowledgeConnector.knowledgeBaseId, + workspaceId: knowledgeBase.workspaceId, + userId: knowledgeBase.userId, + }) + .from(knowledgeConnector) + .innerJoin(knowledgeBase, eq(knowledgeBase.id, knowledgeConnector.knowledgeBaseId)) + .where(eq(knowledgeConnector.id, connectorId)) + .limit(1) + + const row = connectorRows[0] + const tags = [`connectorId:${connectorId}`] + if (row?.knowledgeBaseId) tags.push(`knowledgeBaseId:${row.knowledgeBaseId}`) + if (row?.workspaceId) tags.push(`workspaceId:${row.workspaceId}`) + if (row?.userId) tags.push(`userId:${row.userId}`) + await knowledgeConnectorSync.trigger( { connectorId, fullSync: options?.fullSync, requestId, }, - { tags: [`connector:${connectorId}`] } + { tags } ) logger.info(`Dispatched connector sync to Trigger.dev`, { connectorId, requestId }) } else if (isBullMQEnabled()) { diff --git a/apps/sim/lib/knowledge/documents/service.ts b/apps/sim/lib/knowledge/documents/service.ts index 1c991b850f7..4da57c4e005 100644 --- a/apps/sim/lib/knowledge/documents/service.ts +++ b/apps/sim/lib/knowledge/documents/service.ts @@ -101,11 +101,8 @@ export interface DocumentData { } export interface ProcessingOptions { - chunkSize?: number - minCharactersPerChunk?: number recipe?: string lang?: string - chunkOverlap?: number } export interface DocumentJobData { @@ -123,7 +120,9 @@ export interface DocumentJobData { export async function dispatchDocumentProcessingJob(payload: DocumentJobData): Promise { if (isTriggerAvailable()) { - await tasks.trigger('knowledge-process-document', payload) + await tasks.trigger('knowledge-process-document', payload, { + tags: [`knowledgeBaseId:${payload.knowledgeBaseId}`, `documentId:${payload.documentId}`], + }) return } @@ -416,13 +415,7 @@ export async function processDocumentAsync( fileSize: number mimeType: string }, - processingOptions: { - 
chunkSize?: number - minCharactersPerChunk?: number - recipe?: string - lang?: string - chunkOverlap?: number - } + processingOptions: ProcessingOptions = {} ): Promise { const startTime = Date.now() try { @@ -456,7 +449,16 @@ export async function processDocumentAsync( logger.info(`[${documentId}] Status updated to 'processing', starting document processor`) - const kbConfig = kb[0].chunkingConfig as { maxSize: number; minSize: number; overlap: number } + const rawConfig = kb[0].chunkingConfig as { + maxSize?: number + minSize?: number + overlap?: number + } | null + const kbConfig = { + maxSize: rawConfig?.maxSize ?? 1024, + minSize: rawConfig?.minSize ?? 100, + overlap: rawConfig?.overlap ?? 200, + } await withTimeout( (async () => { @@ -464,9 +466,9 @@ export async function processDocumentAsync( docData.fileUrl, docData.filename, docData.mimeType, - processingOptions.chunkSize ?? kbConfig.maxSize, - processingOptions.chunkOverlap ?? kbConfig.overlap, - processingOptions.minCharactersPerChunk ?? 
kbConfig.minSize, + kbConfig.maxSize, + kbConfig.overlap, + kbConfig.minSize, kb[0].userId, kb[0].workspaceId ) @@ -692,7 +694,7 @@ export async function processDocumentsWithTrigger( payload: doc, options: { idempotencyKey: `doc-process-${doc.documentId}-${requestId}`, - tags: [`kb:${doc.knowledgeBaseId}`, `doc:${doc.documentId}`], + tags: [`knowledgeBaseId:${doc.knowledgeBaseId}`, `documentId:${doc.documentId}`], }, })) ) @@ -1573,16 +1575,6 @@ export async function retryDocumentProcessing( }, requestId: string ): Promise<{ success: boolean; status: string; message: string }> { - const kb = await db - .select({ - chunkingConfig: knowledgeBase.chunkingConfig, - }) - .from(knowledgeBase) - .where(eq(knowledgeBase.id, knowledgeBaseId)) - .limit(1) - - const kbConfig = kb[0].chunkingConfig as { maxSize: number; minSize: number; overlap: number } - await db.transaction(async (tx) => { await tx.delete(embedding).where(eq(embedding.documentId, documentId)) @@ -1600,14 +1592,6 @@ export async function retryDocumentProcessing( .where(eq(document.id, documentId)) }) - const processingOptions = { - chunkSize: kbConfig.maxSize, - minCharactersPerChunk: kbConfig.minSize, - recipe: 'default', - lang: 'en', - chunkOverlap: kbConfig.overlap, - } - await processDocumentsWithQueue( [ { @@ -1619,7 +1603,7 @@ export async function retryDocumentProcessing( }, ], knowledgeBaseId, - processingOptions, + {}, requestId ) diff --git a/apps/sim/lib/logs/events.ts b/apps/sim/lib/logs/events.ts index 54392e2ebbf..78f9529c71a 100644 --- a/apps/sim/lib/logs/events.ts +++ b/apps/sim/lib/logs/events.ts @@ -139,7 +139,13 @@ export async function emitWorkflowExecutionCompleted(log: WorkflowExecutionLog): } if (isTriggerDevEnabled) { - await workspaceNotificationDeliveryTask.trigger(payload) + await workspaceNotificationDeliveryTask.trigger(payload, { + tags: [ + `workspaceId:${workspaceId}`, + `workflowId:${log.workflowId}`, + `notificationType:${subscription.notificationType}`, + ], + }) 
logger.info( `Enqueued ${subscription.notificationType} notification ${deliveryId} via Trigger.dev` ) diff --git a/apps/sim/lib/notifications/inactivity-polling.ts b/apps/sim/lib/notifications/inactivity-polling.ts index 81aa0692dba..2fa689f42fc 100644 --- a/apps/sim/lib/notifications/inactivity-polling.ts +++ b/apps/sim/lib/notifications/inactivity-polling.ts @@ -189,7 +189,13 @@ async function checkWorkflowInactivity( } if (isTriggerDevEnabled) { - await workspaceNotificationDeliveryTask.trigger(payload) + await workspaceNotificationDeliveryTask.trigger(payload, { + tags: [ + `workspaceId:${workflowData.workspaceId}`, + `workflowId:${workflowId}`, + `notificationType:${subscription.notificationType}`, + ], + }) } else if (await enqueueNotificationDeliveryDispatch(payload)) { } else { void executeNotificationDelivery(payload).catch((error) => { diff --git a/apps/sim/lib/webhooks/processor.ts b/apps/sim/lib/webhooks/processor.ts index 3ded5d96a48..932ae3b05d6 100644 --- a/apps/sim/lib/webhooks/processor.ts +++ b/apps/sim/lib/webhooks/processor.ts @@ -1287,6 +1287,7 @@ export async function queueWebhookExecution( : await (await getJobQueue()).enqueue('webhook-execution', payload, { metadata: { workflowId: foundWorkflow.id, + workspaceId: foundWorkflow.workspaceId, userId: actorUserId, correlation, }, @@ -1317,6 +1318,7 @@ export async function queueWebhookExecution( : await jobQueue.enqueue('webhook-execution', payload, { metadata: { workflowId: foundWorkflow.id, + workspaceId: foundWorkflow.workspaceId, userId: actorUserId, correlation, }, diff --git a/apps/sim/lib/webhooks/utils.server.ts b/apps/sim/lib/webhooks/utils.server.ts index 3eec55697f4..4decf492eb5 100644 --- a/apps/sim/lib/webhooks/utils.server.ts +++ b/apps/sim/lib/webhooks/utils.server.ts @@ -1311,6 +1311,8 @@ export async function formatWebhookInput( extractAttioCommentData, extractAttioListEntryData, extractAttioListEntryUpdatedData, + extractAttioListData, + extractAttioWorkspaceMemberData, 
extractAttioGenericData, } = await import('@/triggers/attio/utils') @@ -1341,6 +1343,16 @@ export async function formatWebhookInput( if (triggerId === 'attio_list_entry_created' || triggerId === 'attio_list_entry_deleted') { return extractAttioListEntryData(body) } + if ( + triggerId === 'attio_list_created' || + triggerId === 'attio_list_updated' || + triggerId === 'attio_list_deleted' + ) { + return extractAttioListData(body) + } + if (triggerId === 'attio_workspace_member_created') { + return extractAttioWorkspaceMemberData(body) + } return extractAttioGenericData(body) } diff --git a/apps/sim/lib/workflows/comparison/resolve-values.ts b/apps/sim/lib/workflows/comparison/resolve-values.ts index 6858535a6ae..2fe7f24a34d 100644 --- a/apps/sim/lib/workflows/comparison/resolve-values.ts +++ b/apps/sim/lib/workflows/comparison/resolve-values.ts @@ -34,6 +34,8 @@ interface ResolutionContext { subBlockId: string /** The workflow ID for API calls */ workflowId: string + /** The workspace scope for selector-based lookups */ + workspaceId?: string /** The current workflow state for extracting additional context */ currentState: WorkflowState /** The block ID being resolved */ @@ -64,13 +66,15 @@ async function resolveCredential(credentialId: string, workflowId: string): Prom } } -async function resolveWorkflow(workflowId: string): Promise { +async function resolveWorkflow(workflowId: string, workspaceId?: string): Promise { + if (!workspaceId) return null + try { const definition = getSelectorDefinition('sim.workflows') if (definition.fetchById) { const result = await definition.fetchById({ key: 'sim.workflows', - context: {}, + context: { workspaceId }, detailId: workflowId, }) return result?.label ?? 
null @@ -141,11 +145,12 @@ export function formatValueForDisplay(value: unknown): string { function extractSelectorContext( blockId: string, currentState: WorkflowState, - workflowId: string + workflowId: string, + workspaceId?: string ): SelectorContext { const block = currentState.blocks?.[blockId] - if (!block?.subBlocks) return { workflowId } - return buildSelectorContextFromBlock(block.type, block.subBlocks, { workflowId }) + if (!block?.subBlocks) return { workflowId, workspaceId } + return buildSelectorContextFromBlock(block.type, block.subBlocks, { workflowId, workspaceId }) } /** @@ -177,8 +182,13 @@ export async function resolveValueForDisplay( const semanticFallback = getSemanticFallback(subBlockConfig) const selectorCtx = context.blockId - ? extractSelectorContext(context.blockId, context.currentState, context.workflowId) - : { workflowId: context.workflowId } + ? extractSelectorContext( + context.blockId, + context.currentState, + context.workflowId, + context.workspaceId + ) + : { workflowId: context.workflowId, workspaceId: context.workspaceId } // Credential fields (oauth-input or credential subBlockId) const isCredentialField = @@ -194,7 +204,7 @@ export async function resolveValueForDisplay( // Workflow selector if (subBlockConfig?.type === 'workflow-selector' && isUuid(value)) { - const label = await resolveWorkflow(value) + const label = await resolveWorkflow(value, selectorCtx.workspaceId) if (label) { return { original: value, displayLabel: label, resolved: true } } diff --git a/apps/sim/lib/workflows/subblocks/context.test.ts b/apps/sim/lib/workflows/subblocks/context.test.ts index c30f1f1b0af..29cad46d941 100644 --- a/apps/sim/lib/workflows/subblocks/context.test.ts +++ b/apps/sim/lib/workflows/subblocks/context.test.ts @@ -66,6 +66,16 @@ describe('buildSelectorContextFromBlock', () => { expect(ctx.workflowId).toBe('wf-123') }) + it('should pass through workspaceId from opts', () => { + const ctx = buildSelectorContextFromBlock( + 
'knowledge', + { operation: { id: 'operation', type: 'dropdown', value: 'search' } }, + { workspaceId: 'ws-123' } + ) + + expect(ctx.workspaceId).toBe('ws-123') + }) + it('should ignore subblock keys not in SELECTOR_CONTEXT_FIELDS', () => { const ctx = buildSelectorContextFromBlock('knowledge', { operation: { id: 'operation', type: 'dropdown', value: 'search' }, diff --git a/apps/sim/lib/workflows/subblocks/context.ts b/apps/sim/lib/workflows/subblocks/context.ts index 9b43bc892f3..affd888e2aa 100644 --- a/apps/sim/lib/workflows/subblocks/context.ts +++ b/apps/sim/lib/workflows/subblocks/context.ts @@ -34,10 +34,11 @@ export const SELECTOR_CONTEXT_FIELDS = new Set([ export function buildSelectorContextFromBlock( blockType: string, subBlocks: Record, - opts?: { workflowId?: string } + opts?: { workflowId?: string; workspaceId?: string } ): SelectorContext { const context: SelectorContext = {} if (opts?.workflowId) context.workflowId = opts.workflowId + if (opts?.workspaceId) context.workspaceId = opts.workspaceId const blockConfig = getBlock(blockType) if (!blockConfig) return context diff --git a/apps/sim/package.json b/apps/sim/package.json index 46ed560593d..8fa56f2078c 100644 --- a/apps/sim/package.json +++ b/apps/sim/package.json @@ -40,6 +40,7 @@ "@aws-sdk/client-dynamodb": "3.940.0", "@aws-sdk/client-rds-data": "3.940.0", "@aws-sdk/client-s3": "^3.779.0", + "@aws-sdk/client-secrets-manager": "3.1021.0", "@aws-sdk/client-sqs": "3.947.0", "@aws-sdk/lib-dynamodb": "3.940.0", "@aws-sdk/s3-request-presigner": "^3.779.0", diff --git a/apps/sim/providers/fireworks/index.ts b/apps/sim/providers/fireworks/index.ts new file mode 100644 index 00000000000..b5dd9d41964 --- /dev/null +++ b/apps/sim/providers/fireworks/index.ts @@ -0,0 +1,623 @@ +import { createLogger } from '@sim/logger' +import OpenAI from 'openai' +import type { ChatCompletionCreateParamsStreaming } from 'openai/resources/chat/completions' +import type { StreamingExecution } from '@/executor/types' 
+import { MAX_TOOL_ITERATIONS } from '@/providers' +import { + checkForForcedToolUsage, + createReadableStreamFromOpenAIStream, + supportsNativeStructuredOutputs, +} from '@/providers/fireworks/utils' +import { getProviderDefaultModel, getProviderModels } from '@/providers/models' +import type { + FunctionCallResponse, + Message, + ProviderConfig, + ProviderRequest, + ProviderResponse, + TimeSegment, +} from '@/providers/types' +import { ProviderError } from '@/providers/types' +import { + calculateCost, + generateSchemaInstructions, + prepareToolExecution, + prepareToolsWithUsageControl, + sumToolCosts, +} from '@/providers/utils' +import { executeTool } from '@/tools' + +const logger = createLogger('FireworksProvider') + +/** + * Applies structured output configuration to a payload based on model capabilities. + * Uses json_schema with strict mode for supported models, falls back to json_object with prompt instructions. + */ +async function applyResponseFormat( + targetPayload: any, + messages: any[], + responseFormat: any, + model: string +): Promise { + const useNative = await supportsNativeStructuredOutputs(model) + + if (useNative) { + logger.info('Using native structured outputs for Fireworks model', { model }) + targetPayload.response_format = { + type: 'json_schema', + json_schema: { + name: responseFormat.name || 'response_schema', + schema: responseFormat.schema || responseFormat, + strict: responseFormat.strict !== false, + }, + } + return messages + } + + logger.info('Using json_object mode with prompt instructions for Fireworks model', { model }) + const schema = responseFormat.schema || responseFormat + const schemaInstructions = generateSchemaInstructions(schema, responseFormat.name) + targetPayload.response_format = { type: 'json_object' } + return [...messages, { role: 'user', content: schemaInstructions }] +} + +export const fireworksProvider: ProviderConfig = { + id: 'fireworks', + name: 'Fireworks', + description: 'Fast inference for 
open-source models via Fireworks AI', + version: '1.0.0', + models: getProviderModels('fireworks'), + defaultModel: getProviderDefaultModel('fireworks'), + + executeRequest: async ( + request: ProviderRequest + ): Promise => { + if (!request.apiKey) { + throw new Error('API key is required for Fireworks') + } + + const client = new OpenAI({ + apiKey: request.apiKey, + baseURL: 'https://api.fireworks.ai/inference/v1', + }) + + const requestedModel = request.model.replace(/^fireworks\//, '') + + logger.info('Preparing Fireworks request', { + model: requestedModel, + hasSystemPrompt: !!request.systemPrompt, + hasMessages: !!request.messages?.length, + hasTools: !!request.tools?.length, + toolCount: request.tools?.length || 0, + hasResponseFormat: !!request.responseFormat, + stream: !!request.stream, + }) + + const allMessages: Message[] = [] + + if (request.systemPrompt) { + allMessages.push({ role: 'system', content: request.systemPrompt }) + } + + if (request.context) { + allMessages.push({ role: 'user', content: request.context }) + } + + if (request.messages) { + allMessages.push(...request.messages) + } + + const tools = request.tools?.length + ? 
request.tools.map((tool) => ({ + type: 'function', + function: { + name: tool.id, + description: tool.description, + parameters: tool.parameters, + }, + })) + : undefined + + const payload: any = { + model: requestedModel, + messages: allMessages, + } + + if (request.temperature !== undefined) payload.temperature = request.temperature + if (request.maxTokens != null) payload.max_tokens = request.maxTokens + + let preparedTools: ReturnType | null = null + let hasActiveTools = false + if (tools?.length) { + preparedTools = prepareToolsWithUsageControl(tools, request.tools, logger, 'fireworks') + const { tools: filteredTools, toolChoice } = preparedTools + if (filteredTools?.length && toolChoice) { + payload.tools = filteredTools + payload.tool_choice = toolChoice + hasActiveTools = true + } + } + + const providerStartTime = Date.now() + const providerStartTimeISO = new Date(providerStartTime).toISOString() + + try { + if (request.responseFormat && !hasActiveTools) { + payload.messages = await applyResponseFormat( + payload, + payload.messages, + request.responseFormat, + requestedModel + ) + } + + if (request.stream && (!tools || tools.length === 0 || !hasActiveTools)) { + const streamingParams: ChatCompletionCreateParamsStreaming = { + ...payload, + stream: true, + stream_options: { include_usage: true }, + } + const streamResponse = await client.chat.completions.create( + streamingParams, + request.abortSignal ? 
{ signal: request.abortSignal } : undefined + ) + + const streamingResult = { + stream: createReadableStreamFromOpenAIStream(streamResponse, (content, usage) => { + streamingResult.execution.output.content = content + streamingResult.execution.output.tokens = { + input: usage.prompt_tokens, + output: usage.completion_tokens, + total: usage.total_tokens, + } + + const costResult = calculateCost( + requestedModel, + usage.prompt_tokens, + usage.completion_tokens + ) + streamingResult.execution.output.cost = { + input: costResult.input, + output: costResult.output, + total: costResult.total, + } + + const end = Date.now() + const endISO = new Date(end).toISOString() + if (streamingResult.execution.output.providerTiming) { + streamingResult.execution.output.providerTiming.endTime = endISO + streamingResult.execution.output.providerTiming.duration = end - providerStartTime + if (streamingResult.execution.output.providerTiming.timeSegments?.[0]) { + streamingResult.execution.output.providerTiming.timeSegments[0].endTime = end + streamingResult.execution.output.providerTiming.timeSegments[0].duration = + end - providerStartTime + } + } + }), + execution: { + success: true, + output: { + content: '', + model: requestedModel, + tokens: { input: 0, output: 0, total: 0 }, + toolCalls: undefined, + providerTiming: { + startTime: providerStartTimeISO, + endTime: new Date().toISOString(), + duration: Date.now() - providerStartTime, + timeSegments: [ + { + type: 'model', + name: 'Streaming response', + startTime: providerStartTime, + endTime: Date.now(), + duration: Date.now() - providerStartTime, + }, + ], + }, + cost: { input: 0, output: 0, total: 0 }, + }, + logs: [], + metadata: { + startTime: providerStartTimeISO, + endTime: new Date().toISOString(), + duration: Date.now() - providerStartTime, + }, + }, + } as StreamingExecution + + return streamingResult as StreamingExecution + } + + const initialCallTime = Date.now() + const originalToolChoice = payload.tool_choice + const 
forcedTools = preparedTools?.forcedTools || [] + let usedForcedTools: string[] = [] + + let currentResponse = await client.chat.completions.create( + payload, + request.abortSignal ? { signal: request.abortSignal } : undefined + ) + const firstResponseTime = Date.now() - initialCallTime + + let content = currentResponse.choices[0]?.message?.content || '' + const tokens = { + input: currentResponse.usage?.prompt_tokens || 0, + output: currentResponse.usage?.completion_tokens || 0, + total: currentResponse.usage?.total_tokens || 0, + } + const toolCalls: FunctionCallResponse[] = [] + const toolResults: Record[] = [] + const currentMessages = [...allMessages] + let iterationCount = 0 + let modelTime = firstResponseTime + let toolsTime = 0 + let hasUsedForcedTool = false + const timeSegments: TimeSegment[] = [ + { + type: 'model', + name: 'Initial response', + startTime: initialCallTime, + endTime: initialCallTime + firstResponseTime, + duration: firstResponseTime, + }, + ] + + const forcedToolResult = checkForForcedToolUsage( + currentResponse, + originalToolChoice, + forcedTools, + usedForcedTools + ) + hasUsedForcedTool = forcedToolResult.hasUsedForcedTool + usedForcedTools = forcedToolResult.usedForcedTools + + while (iterationCount < MAX_TOOL_ITERATIONS) { + if (currentResponse.choices[0]?.message?.content) { + content = currentResponse.choices[0].message.content + } + + const toolCallsInResponse = currentResponse.choices[0]?.message?.tool_calls + if (!toolCallsInResponse || toolCallsInResponse.length === 0) { + break + } + + const toolsStartTime = Date.now() + + const toolExecutionPromises = toolCallsInResponse.map(async (toolCall) => { + const toolCallStartTime = Date.now() + const toolName = toolCall.function.name + + try { + const toolArgs = JSON.parse(toolCall.function.arguments) + const tool = request.tools?.find((t) => t.id === toolName) + + if (!tool) return null + + const { toolParams, executionParams } = prepareToolExecution(tool, toolArgs, request) + 
const result = await executeTool(toolName, executionParams) + const toolCallEndTime = Date.now() + + return { + toolCall, + toolName, + toolParams, + result, + startTime: toolCallStartTime, + endTime: toolCallEndTime, + duration: toolCallEndTime - toolCallStartTime, + } + } catch (error) { + const toolCallEndTime = Date.now() + logger.error('Error processing tool call (Fireworks):', { + error: error instanceof Error ? error.message : String(error), + toolName, + }) + + return { + toolCall, + toolName, + toolParams: {}, + result: { + success: false, + output: undefined, + error: error instanceof Error ? error.message : 'Tool execution failed', + }, + startTime: toolCallStartTime, + endTime: toolCallEndTime, + duration: toolCallEndTime - toolCallStartTime, + } + } + }) + + const executionResults = await Promise.allSettled(toolExecutionPromises) + + currentMessages.push({ + role: 'assistant', + content: null, + tool_calls: toolCallsInResponse.map((tc) => ({ + id: tc.id, + type: 'function', + function: { + name: tc.function.name, + arguments: tc.function.arguments, + }, + })), + }) + + for (const settledResult of executionResults) { + if (settledResult.status === 'rejected' || !settledResult.value) continue + + const { toolCall, toolName, toolParams, result, startTime, endTime, duration } = + settledResult.value + + timeSegments.push({ + type: 'tool', + name: toolName, + startTime: startTime, + endTime: endTime, + duration: duration, + }) + + let resultContent: any + if (result.success) { + toolResults.push(result.output!) 
+ resultContent = result.output + } else { + resultContent = { + error: true, + message: result.error || 'Tool execution failed', + tool: toolName, + } + } + + toolCalls.push({ + name: toolName, + arguments: toolParams, + startTime: new Date(startTime).toISOString(), + endTime: new Date(endTime).toISOString(), + duration: duration, + result: resultContent, + success: result.success, + }) + + currentMessages.push({ + role: 'tool', + tool_call_id: toolCall.id, + content: JSON.stringify(resultContent), + }) + } + + const thisToolsTime = Date.now() - toolsStartTime + toolsTime += thisToolsTime + + const nextPayload = { + ...payload, + messages: currentMessages, + } + + if (typeof originalToolChoice === 'object' && hasUsedForcedTool && forcedTools.length > 0) { + const remainingTools = forcedTools.filter((tool) => !usedForcedTools.includes(tool)) + if (remainingTools.length > 0) { + nextPayload.tool_choice = { type: 'function', function: { name: remainingTools[0] } } + } else { + nextPayload.tool_choice = 'auto' + } + } + + const nextModelStartTime = Date.now() + currentResponse = await client.chat.completions.create( + nextPayload, + request.abortSignal ? 
{ signal: request.abortSignal } : undefined + ) + const nextForcedToolResult = checkForForcedToolUsage( + currentResponse, + nextPayload.tool_choice, + forcedTools, + usedForcedTools + ) + hasUsedForcedTool = nextForcedToolResult.hasUsedForcedTool + usedForcedTools = nextForcedToolResult.usedForcedTools + const nextModelEndTime = Date.now() + const thisModelTime = nextModelEndTime - nextModelStartTime + timeSegments.push({ + type: 'model', + name: `Model response (iteration ${iterationCount + 1})`, + startTime: nextModelStartTime, + endTime: nextModelEndTime, + duration: thisModelTime, + }) + modelTime += thisModelTime + if (currentResponse.choices[0]?.message?.content) { + content = currentResponse.choices[0].message.content + } + if (currentResponse.usage) { + tokens.input += currentResponse.usage.prompt_tokens || 0 + tokens.output += currentResponse.usage.completion_tokens || 0 + tokens.total += currentResponse.usage.total_tokens || 0 + } + iterationCount++ + } + + if (request.stream) { + const accumulatedCost = calculateCost(requestedModel, tokens.input, tokens.output) + + const streamingParams: ChatCompletionCreateParamsStreaming = { + ...payload, + messages: [...currentMessages], + tool_choice: 'auto', + stream: true, + stream_options: { include_usage: true }, + } + + if (request.responseFormat) { + ;(streamingParams as any).messages = await applyResponseFormat( + streamingParams as any, + streamingParams.messages, + request.responseFormat, + requestedModel + ) + } + + const streamResponse = await client.chat.completions.create( + streamingParams, + request.abortSignal ? 
{ signal: request.abortSignal } : undefined + ) + + const streamingResult = { + stream: createReadableStreamFromOpenAIStream(streamResponse, (content, usage) => { + streamingResult.execution.output.content = content + streamingResult.execution.output.tokens = { + input: tokens.input + usage.prompt_tokens, + output: tokens.output + usage.completion_tokens, + total: tokens.total + usage.total_tokens, + } + + const streamCost = calculateCost( + requestedModel, + usage.prompt_tokens, + usage.completion_tokens + ) + const tc = sumToolCosts(toolResults) + streamingResult.execution.output.cost = { + input: accumulatedCost.input + streamCost.input, + output: accumulatedCost.output + streamCost.output, + toolCost: tc || undefined, + total: accumulatedCost.total + streamCost.total + tc, + } + }), + execution: { + success: true, + output: { + content: '', + model: requestedModel, + tokens: { input: tokens.input, output: tokens.output, total: tokens.total }, + toolCalls: + toolCalls.length > 0 + ? { + list: toolCalls, + count: toolCalls.length, + } + : undefined, + providerTiming: { + startTime: providerStartTimeISO, + endTime: new Date().toISOString(), + duration: Date.now() - providerStartTime, + modelTime: modelTime, + toolsTime: toolsTime, + firstResponseTime: firstResponseTime, + iterations: iterationCount + 1, + timeSegments: timeSegments, + }, + cost: { + input: accumulatedCost.input, + output: accumulatedCost.output, + total: accumulatedCost.total, + }, + }, + logs: [], + metadata: { + startTime: providerStartTimeISO, + endTime: new Date().toISOString(), + duration: Date.now() - providerStartTime, + }, + }, + } as StreamingExecution + + return streamingResult as StreamingExecution + } + + if (request.responseFormat && hasActiveTools) { + const finalPayload: any = { + model: payload.model, + messages: [...currentMessages], + } + if (payload.temperature !== undefined) { + finalPayload.temperature = payload.temperature + } + if (payload.max_tokens !== undefined) { + 
finalPayload.max_tokens = payload.max_tokens + } + + finalPayload.messages = await applyResponseFormat( + finalPayload, + finalPayload.messages, + request.responseFormat, + requestedModel + ) + + const finalStartTime = Date.now() + const finalResponse = await client.chat.completions.create( + finalPayload, + request.abortSignal ? { signal: request.abortSignal } : undefined + ) + const finalEndTime = Date.now() + const finalDuration = finalEndTime - finalStartTime + + timeSegments.push({ + type: 'model', + name: 'Final structured response', + startTime: finalStartTime, + endTime: finalEndTime, + duration: finalDuration, + }) + modelTime += finalDuration + + if (finalResponse.choices[0]?.message?.content) { + content = finalResponse.choices[0].message.content + } + if (finalResponse.usage) { + tokens.input += finalResponse.usage.prompt_tokens || 0 + tokens.output += finalResponse.usage.completion_tokens || 0 + tokens.total += finalResponse.usage.total_tokens || 0 + } + } + + const providerEndTime = Date.now() + const providerEndTimeISO = new Date(providerEndTime).toISOString() + const totalDuration = providerEndTime - providerStartTime + + return { + content, + model: requestedModel, + tokens, + toolCalls: toolCalls.length > 0 ? toolCalls : undefined, + toolResults: toolResults.length > 0 ? toolResults : undefined, + timing: { + startTime: providerStartTimeISO, + endTime: providerEndTimeISO, + duration: totalDuration, + modelTime: modelTime, + toolsTime: toolsTime, + firstResponseTime: firstResponseTime, + iterations: iterationCount + 1, + timeSegments: timeSegments, + }, + } + } catch (error) { + const providerEndTime = Date.now() + const providerEndTimeISO = new Date(providerEndTime).toISOString() + const totalDuration = providerEndTime - providerStartTime + + const errorDetails: Record = { + error: error instanceof Error ? 
error.message : String(error), + duration: totalDuration, + } + if (error && typeof error === 'object') { + const err = error as any + if (err.status) errorDetails.status = err.status + if (err.code) errorDetails.code = err.code + if (err.type) errorDetails.type = err.type + if (err.error?.message) errorDetails.providerMessage = err.error.message + if (err.error?.metadata) errorDetails.metadata = err.error.metadata + } + + logger.error('Error in Fireworks request:', errorDetails) + throw new ProviderError(error instanceof Error ? error.message : String(error), { + startTime: providerStartTimeISO, + endTime: providerEndTimeISO, + duration: totalDuration, + }) + } + }, +} diff --git a/apps/sim/providers/fireworks/utils.ts b/apps/sim/providers/fireworks/utils.ts new file mode 100644 index 00000000000..70444e07b69 --- /dev/null +++ b/apps/sim/providers/fireworks/utils.ts @@ -0,0 +1,41 @@ +import type { ChatCompletionChunk } from 'openai/resources/chat/completions' +import type { CompletionUsage } from 'openai/resources/completions' +import { checkForForcedToolUsageOpenAI, createOpenAICompatibleStream } from '@/providers/utils' + +/** + * Checks if a model supports native structured outputs (json_schema). + * Fireworks AI supports structured outputs across their inference API. + */ +export async function supportsNativeStructuredOutputs(_modelId: string): Promise { + return true +} + +/** + * Creates a ReadableStream from a Fireworks streaming response. + * Uses the shared OpenAI-compatible streaming utility. + */ +export function createReadableStreamFromOpenAIStream( + openaiStream: AsyncIterable, + onComplete?: (content: string, usage: CompletionUsage) => void +): ReadableStream { + return createOpenAICompatibleStream(openaiStream, 'Fireworks', onComplete) +} + +/** + * Checks if a forced tool was used in a Fireworks response. + * Uses the shared OpenAI-compatible forced tool usage helper. 
+ */ +export function checkForForcedToolUsage( + response: any, + toolChoice: string | { type: string; function?: { name: string }; name?: string; any?: any }, + forcedTools: string[], + usedForcedTools: string[] +): { hasUsedForcedTool: boolean; usedForcedTools: string[] } { + return checkForForcedToolUsageOpenAI( + response, + toolChoice, + 'Fireworks', + forcedTools, + usedForcedTools + ) +} diff --git a/apps/sim/providers/models.ts b/apps/sim/providers/models.ts index 37f973198ce..345cc4daeba 100644 --- a/apps/sim/providers/models.ts +++ b/apps/sim/providers/models.ts @@ -14,6 +14,7 @@ import { BedrockIcon, CerebrasIcon, DeepseekIcon, + FireworksIcon, GeminiIcon, GroqIcon, MistralIcon, @@ -24,7 +25,7 @@ import { VllmIcon, xAIIcon, } from '@/components/icons' -import type { ModelPricing } from '@/providers/types' +import type { ModelPricing, ProviderId } from '@/providers/types' export interface ModelCapabilities { temperature?: { @@ -71,6 +72,20 @@ export interface ProviderDefinition { } export const PROVIDER_DEFINITIONS: Record = { + fireworks: { + id: 'fireworks', + name: 'Fireworks', + description: 'Fast inference for open-source models via Fireworks AI', + defaultModel: '', + modelPatterns: [/^fireworks\//], + icon: FireworksIcon, + capabilities: { + temperature: { min: 0, max: 2 }, + toolUsageControl: true, + }, + contextInformationAvailable: false, + models: [], + }, openrouter: { id: 'openrouter', name: 'OpenRouter', @@ -156,6 +171,44 @@ export const PROVIDER_DEFINITIONS: Record = { }, contextWindow: 1050000, }, + { + id: 'gpt-5.4-mini', + pricing: { + input: 0.75, + cachedInput: 0.075, + output: 4.5, + updatedAt: '2026-03-17', + }, + capabilities: { + reasoningEffort: { + values: ['none', 'low', 'medium', 'high', 'xhigh'], + }, + verbosity: { + values: ['low', 'medium', 'high'], + }, + maxOutputTokens: 128000, + }, + contextWindow: 400000, + }, + { + id: 'gpt-5.4-nano', + pricing: { + input: 0.2, + cachedInput: 0.02, + output: 1.25, + updatedAt: 
'2026-03-17', + }, + capabilities: { + reasoningEffort: { + values: ['none', 'low', 'medium', 'high', 'xhigh'], + }, + verbosity: { + values: ['low', 'medium', 'high'], + }, + maxOutputTokens: 128000, + }, + contextWindow: 400000, + }, { id: 'gpt-5.2', pricing: { @@ -546,6 +599,44 @@ export const PROVIDER_DEFINITIONS: Record = { }, contextWindow: 1050000, }, + { + id: 'azure/gpt-5.4-mini', + pricing: { + input: 0.75, + cachedInput: 0.075, + output: 4.5, + updatedAt: '2026-03-17', + }, + capabilities: { + reasoningEffort: { + values: ['none', 'low', 'medium', 'high', 'xhigh'], + }, + verbosity: { + values: ['low', 'medium', 'high'], + }, + maxOutputTokens: 128000, + }, + contextWindow: 400000, + }, + { + id: 'azure/gpt-5.4-nano', + pricing: { + input: 0.2, + cachedInput: 0.02, + output: 1.25, + updatedAt: '2026-03-17', + }, + capabilities: { + reasoningEffort: { + values: ['none', 'low', 'medium', 'high', 'xhigh'], + }, + verbosity: { + values: ['low', 'medium', 'high'], + }, + maxOutputTokens: 128000, + }, + contextWindow: 400000, + }, { id: 'azure/gpt-5.2', pricing: { @@ -2284,6 +2375,45 @@ export function getProviderModels(providerId: string): string[] { return PROVIDER_DEFINITIONS[providerId]?.models.map((m) => m.id) || [] } +export function getBaseModelProviders(): Record { + return Object.entries(PROVIDER_DEFINITIONS) + .filter(([providerId]) => !['ollama', 'vllm', 'openrouter'].includes(providerId)) + .reduce( + (map, [providerId, provider]) => { + provider.models.forEach((model) => { + map[model.id.toLowerCase()] = providerId as ProviderId + }) + return map + }, + {} as Record + ) +} + +export function getProviderFromModel(model: string): ProviderId { + const normalizedModel = model.toLowerCase() + + for (const [providerId, provider] of Object.entries(PROVIDER_DEFINITIONS)) { + if ( + provider.models.some((providerModel) => providerModel.id.toLowerCase() === normalizedModel) + ) { + return providerId as ProviderId + } + } + + for (const [providerId, 
provider] of Object.entries(PROVIDER_DEFINITIONS)) { + if (provider.modelPatterns?.some((pattern) => pattern.test(normalizedModel))) { + return providerId as ProviderId + } + } + + return 'ollama' +} + +export function getProviderIcon(model: string): React.ComponentType<{ className?: string }> | null { + const providerId = getProviderFromModel(model) + return PROVIDER_DEFINITIONS[providerId]?.icon || null +} + export function getProviderDefaultModel(providerId: string): string { return PROVIDER_DEFINITIONS[providerId]?.defaultModel || '' } @@ -2424,6 +2554,18 @@ export function updateVLLMModels(models: string[]): void { })) } +export function updateFireworksModels(models: string[]): void { + PROVIDER_DEFINITIONS.fireworks.models = models.map((modelId) => ({ + id: modelId, + pricing: { + input: 0, + output: 0, + updatedAt: new Date().toISOString().split('T')[0], + }, + capabilities: {}, + })) +} + export function updateOpenRouterModels(models: string[]): void { PROVIDER_DEFINITIONS.openrouter.models = models.map((modelId) => ({ id: modelId, diff --git a/apps/sim/providers/registry.ts b/apps/sim/providers/registry.ts index 3f7be20c947..088686500b3 100644 --- a/apps/sim/providers/registry.ts +++ b/apps/sim/providers/registry.ts @@ -5,6 +5,7 @@ import { azureOpenAIProvider } from '@/providers/azure-openai' import { bedrockProvider } from '@/providers/bedrock' import { cerebrasProvider } from '@/providers/cerebras' import { deepseekProvider } from '@/providers/deepseek' +import { fireworksProvider } from '@/providers/fireworks' import { googleProvider } from '@/providers/google' import { groqProvider } from '@/providers/groq' import { mistralProvider } from '@/providers/mistral' @@ -32,6 +33,7 @@ const providerRegistry: Record = { mistral: mistralProvider, 'azure-openai': azureOpenAIProvider, openrouter: openRouterProvider, + fireworks: fireworksProvider, ollama: ollamaProvider, bedrock: bedrockProvider, } diff --git a/apps/sim/providers/types.ts 
b/apps/sim/providers/types.ts index 9dd78eb643d..69c36079df7 100644 --- a/apps/sim/providers/types.ts +++ b/apps/sim/providers/types.ts @@ -14,6 +14,7 @@ export type ProviderId = | 'mistral' | 'ollama' | 'openrouter' + | 'fireworks' | 'vllm' | 'bedrock' diff --git a/apps/sim/providers/utils.ts b/apps/sim/providers/utils.ts index eeadb8cacc1..30fe1467eb1 100644 --- a/apps/sim/providers/utils.ts +++ b/apps/sim/providers/utils.ts @@ -147,6 +147,7 @@ export const providers: Record = { mistral: buildProviderMetadata('mistral'), bedrock: buildProviderMetadata('bedrock'), openrouter: buildProviderMetadata('openrouter'), + fireworks: buildProviderMetadata('fireworks'), } export function updateOllamaProviderModels(models: string[]): void { @@ -166,11 +167,20 @@ export async function updateOpenRouterProviderModels(models: string[]): Promise< providers.openrouter.models = getProviderModelsFromDefinitions('openrouter') } +export async function updateFireworksProviderModels(models: string[]): Promise { + const { updateFireworksModels } = await import('@/providers/models') + updateFireworksModels(models) + providers.fireworks.models = getProviderModelsFromDefinitions('fireworks') +} + export function getBaseModelProviders(): Record { const allProviders = Object.entries(providers) .filter( ([providerId]) => - providerId !== 'ollama' && providerId !== 'vllm' && providerId !== 'openrouter' + providerId !== 'ollama' && + providerId !== 'vllm' && + providerId !== 'openrouter' && + providerId !== 'fireworks' ) .reduce( (map, [providerId, config]) => { diff --git a/apps/sim/stores/folders/store.ts b/apps/sim/stores/folders/store.ts index c21f0cc03b7..88a5211e6bf 100644 --- a/apps/sim/stores/folders/store.ts +++ b/apps/sim/stores/folders/store.ts @@ -1,12 +1,10 @@ import { createLogger } from '@sim/logger' import { create } from 'zustand' import { devtools } from 'zustand/middleware' -import type { FolderTreeNode, WorkflowFolder } from './types' const logger = 
createLogger('FoldersStore') interface FolderState { - folders: Record expandedFolders: Set selectedWorkflows: Set selectedFolders: Set @@ -14,7 +12,6 @@ interface FolderState { selectedTasks: Set lastSelectedTaskId: string | null - setFolders: (folders: WorkflowFolder[]) => void toggleExpanded: (folderId: string) => void setExpanded: (folderId: string, expanded: boolean) => void @@ -48,18 +45,11 @@ interface FolderState { hasAnySelection: () => boolean isMixedSelection: () => boolean clearAllSelection: () => void - - // Computed values - getFolderTree: (workspaceId: string) => FolderTreeNode[] - getFolderById: (id: string) => WorkflowFolder | undefined - getChildFolders: (parentId: string | null) => WorkflowFolder[] - getFolderPath: (folderId: string) => WorkflowFolder[] } export const useFolderStore = create()( devtools( (set, get) => ({ - folders: {}, expandedFolders: new Set(), selectedWorkflows: new Set(), selectedFolders: new Set(), @@ -67,17 +57,6 @@ export const useFolderStore = create()( selectedTasks: new Set(), lastSelectedTaskId: null, - setFolders: (folders) => - set(() => ({ - folders: folders.reduce( - (acc, folder) => { - acc[folder.id] = folder - return acc - }, - {} as Record - ), - })), - toggleExpanded: (folderId) => set((state) => { const newExpanded = new Set(state.expandedFolders) @@ -312,50 +291,6 @@ export const useFolderStore = create()( selectedTasks: new Set(), lastSelectedTaskId: null, }), - - getFolderTree: (workspaceId) => { - const folders = Object.values(get().folders).filter((f) => f.workspaceId === workspaceId) - - const buildTree = (parentId: string | null, level = 0): FolderTreeNode[] => { - return folders - .filter((folder) => folder.parentId === parentId) - .sort( - (a: WorkflowFolder, b: WorkflowFolder) => - a.sortOrder - b.sortOrder || a.name.localeCompare(b.name) - ) - .map((folder) => ({ - ...folder, - children: buildTree(folder.id, level + 1), - level, - })) - } - - return buildTree(null) - }, - - getFolderById: (id) => 
get().folders[id], - - getChildFolders: (parentId) => - Object.values(get().folders) - .filter((folder) => folder.parentId === parentId) - .sort( - (a: WorkflowFolder, b: WorkflowFolder) => - a.sortOrder - b.sortOrder || a.name.localeCompare(b.name) - ), - - getFolderPath: (folderId) => { - const folders = get().folders - const path: WorkflowFolder[] = [] - let currentId: string | null = folderId - - while (currentId && folders[currentId]) { - const folder: WorkflowFolder = folders[currentId] - path.unshift(folder) - currentId = folder.parentId - } - - return path - }, }), { name: 'folder-store' } ) diff --git a/apps/sim/stores/index.ts b/apps/sim/stores/index.ts index 4ae3c335f07..d1bbbc9b227 100644 --- a/apps/sim/stores/index.ts +++ b/apps/sim/stores/index.ts @@ -201,7 +201,6 @@ export { export const resetAllStores = () => { // Reset all stores to initial state useWorkflowRegistry.setState({ - workflows: {}, activeWorkflowId: null, error: null, deploymentStatuses: {}, diff --git a/apps/sim/stores/panel/variables/store.ts b/apps/sim/stores/panel/variables/store.ts index 70ebc2b7191..e9a7db871c5 100644 --- a/apps/sim/stores/panel/variables/store.ts +++ b/apps/sim/stores/panel/variables/store.ts @@ -5,7 +5,6 @@ import { devtools } from 'zustand/middleware' import { normalizeName } from '@/executor/constants' import { useOperationQueueStore } from '@/stores/operation-queue/store' import type { Variable, VariablesStore } from '@/stores/panel/variables/types' -import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import { useSubBlockStore } from '@/stores/workflows/subblock/store' const logger = createLogger('VariablesStore') @@ -175,10 +174,10 @@ export const useVariablesStore = create()( update.name = undefined } else if (newName !== oldVariableName) { const subBlockStore = useSubBlockStore.getState() - const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId + const targetWorkflowId = oldVariable.workflowId - if (activeWorkflowId) { 
- const workflowValues = subBlockStore.workflowValues[activeWorkflowId] || {} + if (targetWorkflowId) { + const workflowValues = subBlockStore.workflowValues[targetWorkflowId] || {} const updatedWorkflowValues = { ...workflowValues } const changedSubBlocks: Array<{ blockId: string; subBlockId: string; value: any }> = [] @@ -227,7 +226,7 @@ export const useVariablesStore = create()( useSubBlockStore.setState({ workflowValues: { ...subBlockStore.workflowValues, - [activeWorkflowId]: updatedWorkflowValues, + [targetWorkflowId]: updatedWorkflowValues, }, }) @@ -242,7 +241,7 @@ export const useVariablesStore = create()( target: 'subblock', payload: { blockId, subblockId: subBlockId, value }, }, - workflowId: activeWorkflowId, + workflowId: targetWorkflowId, userId: 'system', }) } diff --git a/apps/sim/stores/providers/store.ts b/apps/sim/stores/providers/store.ts index 72b3523a44a..4567812e0f8 100644 --- a/apps/sim/stores/providers/store.ts +++ b/apps/sim/stores/providers/store.ts @@ -10,6 +10,7 @@ export const useProvidersStore = create((set, get) => ({ ollama: { models: [], isLoading: false }, vllm: { models: [], isLoading: false }, openrouter: { models: [], isLoading: false }, + fireworks: { models: [], isLoading: false }, }, openRouterModelInfo: {}, diff --git a/apps/sim/stores/providers/types.ts b/apps/sim/stores/providers/types.ts index e267d1c3ae0..df26e0ec247 100644 --- a/apps/sim/stores/providers/types.ts +++ b/apps/sim/stores/providers/types.ts @@ -1,4 +1,4 @@ -export type ProviderName = 'ollama' | 'vllm' | 'openrouter' | 'base' +export type ProviderName = 'ollama' | 'vllm' | 'openrouter' | 'fireworks' | 'base' export interface OpenRouterModelInfo { id: string diff --git a/apps/sim/stores/workflows/index.ts b/apps/sim/stores/workflows/index.ts index c3cc04ec6f8..e2fdf9a7c4a 100644 --- a/apps/sim/stores/workflows/index.ts +++ b/apps/sim/stores/workflows/index.ts @@ -1,4 +1,5 @@ import { createLogger } from '@sim/logger' +import { getWorkflows } from 
'@/hooks/queries/utils/workflow-cache' import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import { mergeSubblockState } from '@/stores/workflows/utils' import { useWorkflowStore } from '@/stores/workflows/workflow/store' @@ -10,14 +11,15 @@ const logger = createLogger('Workflows') * Get a workflow with its state merged in by ID * Note: Since localStorage has been removed, this only works for the active workflow * @param workflowId ID of the workflow to retrieve + * @param workspaceId Workspace containing the workflow metadata * @returns The workflow with merged state values or null if not found/not active */ -export function getWorkflowWithValues(workflowId: string) { - const { workflows } = useWorkflowRegistry.getState() +export function getWorkflowWithValues(workflowId: string, workspaceId: string) { + const workflows = getWorkflows(workspaceId) const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId - const currentState = useWorkflowStore.getState() - if (!workflows[workflowId]) { + const metadata = workflows.find((w) => w.id === workflowId) + if (!metadata) { logger.warn(`Workflow ${workflowId} not found`) return null } @@ -28,8 +30,6 @@ export function getWorkflowWithValues(workflowId: string) { return null } - const metadata = workflows[workflowId] - // Get deployment status from registry const deploymentStatus = useWorkflowRegistry.getState().getWorkflowDeploymentStatus(workflowId) @@ -77,17 +77,33 @@ export function getBlockWithValues(blockId: string): BlockState | null { /** * Get all workflows with their values merged * Note: Since localStorage has been removed, this only includes the active workflow state + * @param workspaceId Workspace containing the workflow metadata * @returns An object containing workflows, with state only for the active workflow */ -export function getAllWorkflowsWithValues() { - const { workflows } = useWorkflowRegistry.getState() - const result: Record = {} +export function 
getAllWorkflowsWithValues(workspaceId: string) { + const workflows = getWorkflows(workspaceId) + const result: Record< + string, + { + id: string + name: string + description?: string + color: string + folderId?: string | null + workspaceId?: string + apiKey?: string + state: WorkflowState & { isDeployed: boolean; deployedAt?: Date } + } + > = {} const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId const currentState = useWorkflowStore.getState() // Only sync the active workflow to ensure we always send valid state data - if (activeWorkflowId && workflows[activeWorkflowId]) { - const metadata = workflows[activeWorkflowId] + const activeMetadata = activeWorkflowId + ? workflows.find((w) => w.id === activeWorkflowId) + : undefined + if (activeWorkflowId && activeMetadata) { + const metadata = activeMetadata // Get deployment status from registry const deploymentStatus = useWorkflowRegistry diff --git a/apps/sim/stores/workflows/registry/store.ts b/apps/sim/stores/workflows/registry/store.ts index dca49b8ddae..4d3fece524a 100644 --- a/apps/sim/stores/workflows/registry/store.ts +++ b/apps/sim/stores/workflows/registry/store.ts @@ -1,21 +1,20 @@ import { createLogger } from '@sim/logger' import { create } from 'zustand' import { devtools } from 'zustand/middleware' -import { withOptimisticUpdate } from '@/lib/core/utils/optimistic-update' import { DEFAULT_DUPLICATE_OFFSET } from '@/lib/workflows/autolayout/constants' -import { getNextWorkflowColor } from '@/lib/workflows/colors' -import { buildDefaultWorkflowArtifacts } from '@/lib/workflows/defaults' +import { getQueryClient } from '@/app/_shell/providers/get-query-client' +import { invalidateWorkflowLists } from '@/hooks/queries/utils/invalidate-workflow-lists' import { useVariablesStore } from '@/stores/panel/variables/store' +import type { Variable } from '@/stores/panel/variables/types' import type { DeploymentStatus, HydrationState, - WorkflowMetadata, WorkflowRegistry, } from 
'@/stores/workflows/registry/types' import { useSubBlockStore } from '@/stores/workflows/subblock/store' import { getUniqueBlockName, regenerateBlockIds } from '@/stores/workflows/utils' import { useWorkflowStore } from '@/stores/workflows/workflow/store' -import type { BlockState, Loop, Parallel } from '@/stores/workflows/workflow/types' +import type { BlockState, Loop, Parallel, WorkflowState } from '@/stores/workflows/workflow/types' const logger = createLogger('WorkflowRegistry') const initialHydration: HydrationState = { @@ -28,14 +27,9 @@ const initialHydration: HydrationState = { const createRequestId = () => `${Date.now()}-${Math.random().toString(16).slice(2)}` -// Track workspace transitions to prevent race conditions -let isWorkspaceTransitioning = false -const TRANSITION_TIMEOUT = 5000 // 5 seconds maximum for workspace transitions - -// Resets workflow and subblock stores to prevent data leakage between workspaces function resetWorkflowStores() { - // Reset the workflow store to prevent data leakage between workspaces useWorkflowStore.setState({ + currentWorkflowId: null, blocks: {}, edges: [], loops: {}, @@ -44,33 +38,14 @@ function resetWorkflowStores() { lastSaved: Date.now(), }) - // Reset the subblock store useSubBlockStore.setState({ workflowValues: {}, }) } -/** - * Handles workspace transition state tracking - * @param isTransitioning Whether workspace is currently transitioning - */ -function setWorkspaceTransitioning(isTransitioning: boolean): void { - isWorkspaceTransitioning = isTransitioning - - if (isTransitioning) { - setTimeout(() => { - if (isWorkspaceTransitioning) { - logger.warn('Forcing workspace transition to complete due to timeout') - isWorkspaceTransitioning = false - } - }, TRANSITION_TIMEOUT) - } -} - export const useWorkflowRegistry = create()( devtools( (set, get) => ({ - workflows: {}, activeWorkflowId: null, error: null, deploymentStatuses: {}, @@ -78,108 +53,26 @@ export const useWorkflowRegistry = create()( clipboard: 
null, pendingSelection: null, - beginMetadataLoad: (workspaceId: string) => { - set((state) => ({ + switchToWorkspace: (workspaceId: string) => { + logger.info(`Switching to workspace: ${workspaceId}`) + + resetWorkflowStores() + void invalidateWorkflowLists(getQueryClient(), workspaceId) + + set({ + activeWorkflowId: null, + deploymentStatuses: {}, error: null, hydration: { - phase: 'metadata-loading', + phase: 'idle', workspaceId, workflowId: null, requestId: null, error: null, }, - })) - }, - - completeMetadataLoad: (workspaceId: string, workflows: WorkflowMetadata[]) => { - const mapped = workflows.reduce>((acc, workflow) => { - acc[workflow.id] = workflow - return acc - }, {}) - - set((state) => { - const shouldPreserveHydration = - state.hydration.phase === 'state-loading' || - (state.hydration.phase === 'ready' && - state.hydration.workflowId && - mapped[state.hydration.workflowId]) - - return { - workflows: mapped, - error: null, - hydration: shouldPreserveHydration - ? state.hydration - : { - phase: 'metadata-ready', - workspaceId, - workflowId: null, - requestId: null, - error: null, - }, - } }) }, - failMetadataLoad: (workspaceId: string | null, errorMessage: string) => { - set((state) => ({ - error: errorMessage, - hydration: { - phase: 'error', - workspaceId: workspaceId ?? 
state.hydration.workspaceId, - workflowId: state.hydration.workflowId, - requestId: null, - error: errorMessage, - }, - })) - }, - - switchToWorkspace: async (workspaceId: string) => { - if (isWorkspaceTransitioning) { - logger.warn( - `Ignoring workspace switch to ${workspaceId} - transition already in progress` - ) - return - } - - setWorkspaceTransitioning(true) - - try { - logger.info(`Switching to workspace: ${workspaceId}`) - - resetWorkflowStores() - - set({ - activeWorkflowId: null, - workflows: {}, - deploymentStatuses: {}, - error: null, - hydration: { - phase: 'metadata-loading', - workspaceId, - workflowId: null, - requestId: null, - error: null, - }, - }) - - logger.info(`Successfully switched to workspace: ${workspaceId}`) - } catch (error) { - logger.error(`Error switching to workspace ${workspaceId}:`, { error }) - set({ - error: `Failed to switch workspace: ${error instanceof Error ? error.message : 'Unknown error'}`, - hydration: { - phase: 'error', - workspaceId, - workflowId: null, - requestId: null, - error: error instanceof Error ? error.message : 'Unknown error', - }, - }) - } finally { - setWorkspaceTransitioning(false) - } - }, - getWorkflowDeploymentStatus: (workflowId: string | null): DeploymentStatus | null => { if (!workflowId) { workflowId = get().activeWorkflowId @@ -215,8 +108,7 @@ export const useWorkflowRegistry = create()( apiKey, needsRedeployment: isDeployed ? false - : ((state.deploymentStatuses?.[workflowId as string] as any)?.needsRedeployment ?? - false), + : (state.deploymentStatuses?.[workflowId as string]?.needsRedeployment ?? 
false), }, }, })) @@ -250,10 +142,9 @@ export const useWorkflowRegistry = create()( }, loadWorkflowState: async (workflowId: string) => { - const { workflows } = get() - - if (!workflows[workflowId]) { - const message = `Workflow not found: ${workflowId}` + const workspaceId = get().hydration.workspaceId + if (!workspaceId) { + const message = `Cannot load workflow ${workflowId} without a workspace scope` logger.error(message) set({ error: message }) throw new Error(message) @@ -265,7 +156,7 @@ export const useWorkflowRegistry = create()( error: null, hydration: { phase: 'state-loading', - workspaceId: state.hydration.workspaceId, + workspaceId: workspaceId ?? state.hydration.workspaceId, workflowId, requestId, error: null, @@ -279,24 +170,41 @@ export const useWorkflowRegistry = create()( } const workflowData = (await response.json()).data - let workflowState: any + const nextDeploymentStatuses = + workflowData?.isDeployed || workflowData?.deployedAt + ? { + ...get().deploymentStatuses, + [workflowId]: { + isDeployed: workflowData.isDeployed || false, + deployedAt: workflowData.deployedAt + ? 
new Date(workflowData.deployedAt) + : undefined, + apiKey: workflowData.apiKey || undefined, + needsRedeployment: false, + }, + } + : get().deploymentStatuses + + let workflowState: WorkflowState if (workflowData?.state) { workflowState = { + currentWorkflowId: workflowId, blocks: workflowData.state.blocks || {}, edges: workflowData.state.edges || [], loops: workflowData.state.loops || {}, parallels: workflowData.state.parallels || {}, lastSaved: Date.now(), - deploymentStatuses: {}, + deploymentStatuses: nextDeploymentStatuses, } } else { workflowState = { + currentWorkflowId: workflowId, blocks: {}, edges: [], loops: {}, parallels: {}, - deploymentStatuses: {}, + deploymentStatuses: nextDeploymentStatuses, lastSaved: Date.now(), } @@ -305,21 +213,6 @@ export const useWorkflowRegistry = create()( ) } - const nextDeploymentStatuses = - workflowData?.isDeployed || workflowData?.deployedAt - ? { - ...get().deploymentStatuses, - [workflowId]: { - isDeployed: workflowData.isDeployed || false, - deployedAt: workflowData.deployedAt - ? 
new Date(workflowData.deployedAt) - : undefined, - apiKey: workflowData.apiKey || undefined, - needsRedeployment: false, - }, - } - : get().deploymentStatuses - const currentHydration = get().hydration if ( currentHydration.requestId !== requestId || @@ -338,7 +231,9 @@ export const useWorkflowRegistry = create()( if (workflowData?.variables && typeof workflowData.variables === 'object') { useVariablesStore.setState((state) => { const withoutWorkflow = Object.fromEntries( - Object.entries(state.variables).filter(([, v]: any) => v.workflowId !== workflowId) + Object.entries(state.variables).filter( + (entry): entry is [string, Variable] => entry[1].workflowId !== workflowId + ) ) return { variables: { ...withoutWorkflow, ...workflowData.variables }, @@ -392,10 +287,6 @@ export const useWorkflowRegistry = create()( const workflowStoreState = useWorkflowStore.getState() const hasWorkflowData = Object.keys(workflowStoreState.blocks).length > 0 - // Skip loading only if: - // - Same workflow is already active - // - Workflow data exists - // - Hydration is complete (phase is 'ready') const isFullyHydrated = activeWorkflowId === id && hasWorkflowData && @@ -410,320 +301,16 @@ export const useWorkflowRegistry = create()( await get().loadWorkflowState(id) }, - /** - * Duplicates an existing workflow - */ - duplicateWorkflow: async (sourceId: string) => { - const { workflows } = get() - const sourceWorkflow = workflows[sourceId] - - if (!sourceWorkflow) { - set({ error: `Workflow ${sourceId} not found` }) - return null - } - - // Get the workspace ID from the source workflow (required) - const workspaceId = sourceWorkflow.workspaceId - - // Call the server to duplicate the workflow - server generates all IDs - let duplicatedWorkflow - try { - const response = await fetch(`/api/workflows/${sourceId}/duplicate`, { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ - name: `${sourceWorkflow.name} (Copy)`, - description: 
sourceWorkflow.description, - color: sourceWorkflow.color, - workspaceId: workspaceId, - folderId: sourceWorkflow.folderId, - }), - }) - - if (!response.ok) { - throw new Error(`Failed to duplicate workflow: ${response.statusText}`) - } - - duplicatedWorkflow = await response.json() - logger.info( - `Successfully duplicated workflow ${sourceId} to ${duplicatedWorkflow.id} with ${duplicatedWorkflow.blocksCount} blocks, ${duplicatedWorkflow.edgesCount} edges, ${duplicatedWorkflow.subflowsCount} subflows` - ) - } catch (error) { - logger.error(`Failed to duplicate workflow ${sourceId}:`, error) - set({ - error: `Failed to duplicate workflow: ${error instanceof Error ? error.message : 'Unknown error'}`, - }) - return null - } - - const id = duplicatedWorkflow.id - - const newWorkflow: WorkflowMetadata = { - id, - name: `${sourceWorkflow.name} (Copy)`, - lastModified: new Date(), - createdAt: new Date(), - description: sourceWorkflow.description, - color: getNextWorkflowColor(), - workspaceId, - folderId: sourceWorkflow.folderId, - sortOrder: duplicatedWorkflow.sortOrder ?? 
0, - } - - // Get the current workflow state to copy from - const currentWorkflowState = useWorkflowStore.getState() - - // If we're duplicating the active workflow, use current state - // Otherwise, we need to fetch it from DB or use empty state - let sourceState: any - - if (sourceId === get().activeWorkflowId) { - // Source is the active workflow, copy current state - sourceState = { - blocks: currentWorkflowState.blocks || {}, - edges: currentWorkflowState.edges || [], - loops: currentWorkflowState.loops || {}, - parallels: currentWorkflowState.parallels || {}, - } - } else { - const { workflowState } = buildDefaultWorkflowArtifacts() - sourceState = { - blocks: workflowState.blocks, - edges: workflowState.edges, - loops: workflowState.loops, - parallels: workflowState.parallels, - } - } - - // Create the new workflow state with copied content - const newState = { - blocks: sourceState.blocks, - edges: sourceState.edges, - loops: sourceState.loops, - parallels: sourceState.parallels, - workspaceId, - deploymentStatuses: {}, - lastSaved: Date.now(), - } - - // Add workflow to registry - set((state) => ({ - workflows: { - ...state.workflows, - [id]: newWorkflow, - }, - error: null, - })) - - // Copy subblock values if duplicating active workflow - if (sourceId === get().activeWorkflowId) { - const sourceSubblockValues = useSubBlockStore.getState().workflowValues[sourceId] || {} - useSubBlockStore.setState((state) => ({ - workflowValues: { - ...state.workflowValues, - [id]: sourceSubblockValues, - }, - })) - } else { - // Initialize subblock values for starter block - const subblockValues: Record> = {} - Object.entries(newState.blocks).forEach(([blockId, block]) => { - const blockState = block as any - subblockValues[blockId] = {} - Object.entries(blockState.subBlocks || {}).forEach(([subblockId, subblock]) => { - subblockValues[blockId][subblockId] = (subblock as any).value - }) - }) - - useSubBlockStore.setState((state) => ({ - workflowValues: { - 
...state.workflowValues, - [id]: subblockValues, - }, - })) - } - - try { - await useVariablesStore.getState().loadForWorkflow(id) - } catch (error) { - logger.warn(`Error hydrating variables for duplicated workflow ${id}:`, error) - } - - logger.info( - `Duplicated workflow ${sourceId} to ${id} in workspace ${workspaceId || 'none'}` - ) - - return id - }, - - removeWorkflow: async (id: string) => { - const { workflows, activeWorkflowId } = get() - const workflowToDelete = workflows[id] - - if (!workflowToDelete) { - logger.warn(`Attempted to delete non-existent workflow: ${id}`) - return - } - - const isDeletingActiveWorkflow = activeWorkflowId === id - - await withOptimisticUpdate({ - getCurrentState: () => ({ - workflows: { ...get().workflows }, - activeWorkflowId: get().activeWorkflowId, - subBlockValues: { ...useSubBlockStore.getState().workflowValues }, - workflowStoreState: isDeletingActiveWorkflow - ? { - blocks: { ...useWorkflowStore.getState().blocks }, - edges: [...useWorkflowStore.getState().edges], - loops: { ...useWorkflowStore.getState().loops }, - parallels: { ...useWorkflowStore.getState().parallels }, - lastSaved: useWorkflowStore.getState().lastSaved, - } - : null, - }), - optimisticUpdate: () => { - const newWorkflows = { ...get().workflows } - delete newWorkflows[id] - - const currentSubBlockValues = useSubBlockStore.getState().workflowValues - const newWorkflowValues = { ...currentSubBlockValues } - delete newWorkflowValues[id] - useSubBlockStore.setState({ workflowValues: newWorkflowValues }) - - let newActiveWorkflowId = get().activeWorkflowId - if (isDeletingActiveWorkflow) { - newActiveWorkflowId = null - - useWorkflowStore.setState({ - blocks: {}, - edges: [], - loops: {}, - parallels: {}, - lastSaved: Date.now(), - }) - - logger.info( - `Cleared active workflow ${id} - user will need to manually select another workflow` - ) - } - - set({ - workflows: newWorkflows, - activeWorkflowId: newActiveWorkflowId, - error: null, - }) - - 
logger.info(`Removed workflow ${id} from local state (optimistic)`) - }, - apiCall: async () => { - const response = await fetch(`/api/workflows/${id}`, { - method: 'DELETE', - }) - - if (!response.ok) { - const error = await response.json().catch(() => ({ error: 'Unknown error' })) - throw new Error(error.error || 'Failed to delete workflow') - } - - logger.info(`Successfully deleted workflow ${id} from database`) - }, - rollback: (originalState) => { - set({ - workflows: originalState.workflows, - activeWorkflowId: originalState.activeWorkflowId, - }) - - useSubBlockStore.setState({ workflowValues: originalState.subBlockValues }) - - if (originalState.workflowStoreState) { - useWorkflowStore.getState().replaceWorkflowState(originalState.workflowStoreState) - logger.info(`Restored workflow store state for workflow ${id}`) - } - - logger.info(`Rolled back deletion of workflow ${id}`) - }, - errorMessage: `Failed to delete workflow ${id}`, - }) - }, - - updateWorkflow: async (id: string, metadata: Partial) => { - const { workflows } = get() - const workflow = workflows[id] - if (!workflow) { - logger.warn(`Cannot update workflow ${id}: not found in registry`) - return - } - - await withOptimisticUpdate({ - getCurrentState: () => workflow, - optimisticUpdate: () => { - set((state) => ({ - workflows: { - ...state.workflows, - [id]: { - ...workflow, - ...metadata, - lastModified: new Date(), - createdAt: workflow.createdAt, // Preserve creation date - }, - }, - error: null, - })) - }, - apiCall: async () => { - const response = await fetch(`/api/workflows/${id}`, { - method: 'PUT', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify(metadata), - }) - - if (!response.ok) { - const error = await response.json() - throw new Error(error.error || 'Failed to update workflow') - } - - const { workflow: updatedWorkflow } = await response.json() - logger.info(`Successfully updated workflow ${id} metadata`, metadata) - - set((state) => ({ - workflows: { - 
...state.workflows, - [id]: { - ...state.workflows[id], - name: updatedWorkflow.name, - description: updatedWorkflow.description, - color: updatedWorkflow.color, - folderId: updatedWorkflow.folderId, - lastModified: new Date(updatedWorkflow.updatedAt), - createdAt: updatedWorkflow.createdAt - ? new Date(updatedWorkflow.createdAt) - : state.workflows[id].createdAt, - }, - }, - })) - }, - rollback: (originalWorkflow) => { - set((state) => ({ - workflows: { - ...state.workflows, - [id]: originalWorkflow, // Revert to original state - }, - error: `Failed to update workflow: ${metadata.name ? 'name' : 'metadata'}`, - })) - }, - errorMessage: `Failed to update workflow ${id} metadata`, - }) - }, - logout: () => { logger.info('Logging out - clearing all workflow data') resetWorkflowStores() + // Clear the React Query cache to remove all server state + getQueryClient().clear() + set({ activeWorkflowId: null, - workflows: {}, deploymentStatuses: {}, error: null, hydration: initialHydration, @@ -744,7 +331,6 @@ export const useWorkflowRegistry = create()( const copiedSubBlockValues: Record> = {} const blockIdSet = new Set(blockIds) - // Auto-include nested nodes from selected subflows blockIds.forEach((blockId) => { const loop = workflowStore.loops[blockId] if (loop?.nodes) loop.nodes.forEach((n) => blockIdSet.add(n)) diff --git a/apps/sim/stores/workflows/registry/types.ts b/apps/sim/stores/workflows/registry/types.ts index 1b22fe87d04..375ee0df239 100644 --- a/apps/sim/stores/workflows/registry/types.ts +++ b/apps/sim/stores/workflows/registry/types.ts @@ -32,13 +32,7 @@ export interface WorkflowMetadata { isSandbox?: boolean } -export type HydrationPhase = - | 'idle' - | 'metadata-loading' - | 'metadata-ready' - | 'state-loading' - | 'ready' - | 'error' +export type HydrationPhase = 'idle' | 'state-loading' | 'ready' | 'error' export interface HydrationState { phase: HydrationPhase @@ -49,7 +43,6 @@ export interface HydrationState { } export interface 
WorkflowRegistryState { - workflows: Record activeWorkflowId: string | null error: string | null deploymentStatuses: Record @@ -59,15 +52,9 @@ export interface WorkflowRegistryState { } export interface WorkflowRegistryActions { - beginMetadataLoad: (workspaceId: string) => void - completeMetadataLoad: (workspaceId: string, workflows: WorkflowMetadata[]) => void - failMetadataLoad: (workspaceId: string | null, error: string) => void setActiveWorkflow: (id: string) => Promise loadWorkflowState: (workflowId: string) => Promise - switchToWorkspace: (id: string) => Promise - removeWorkflow: (id: string) => Promise - updateWorkflow: (id: string, metadata: Partial) => Promise - duplicateWorkflow: (sourceId: string) => Promise + switchToWorkspace: (id: string) => void getWorkflowDeploymentStatus: (workflowId: string | null) => DeploymentStatus | null setDeploymentStatus: ( workflowId: string | null, diff --git a/apps/sim/stores/workflows/workflow/store.ts b/apps/sim/stores/workflows/workflow/store.ts index 10a580c36f6..0ddbf8d420b 100644 --- a/apps/sim/stores/workflows/workflow/store.ts +++ b/apps/sim/stores/workflows/workflow/store.ts @@ -9,7 +9,6 @@ import { } from '@/lib/workflows/dynamic-handle-topology' import type { SubBlockConfig } from '@/blocks/types' import { normalizeName, RESERVED_BLOCK_NAMES } from '@/executor/constants' -import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import { useSubBlockStore } from '@/stores/workflows/subblock/store' import { filterNewEdges, @@ -102,6 +101,7 @@ function resolveInitialSubblockValue(config: SubBlockConfig): unknown { } const initialState = { + currentWorkflowId: null, blocks: {}, edges: [], loops: {}, @@ -120,6 +120,10 @@ export const useWorkflowStore = create()( set({ needsRedeployment }) }, + setCurrentWorkflowId: (currentWorkflowId) => { + set({ currentWorkflowId }) + }, + updateNodeDimensions: (id: string, dimensions: { width: number; height: number }) => { set((state) => { const block = 
state.blocks[id] @@ -289,7 +293,7 @@ export const useWorkflowStore = create()( }) if (subBlockValues && Object.keys(subBlockValues).length > 0) { - const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId + const activeWorkflowId = get().currentWorkflowId if (activeWorkflowId) { const subBlockStore = useSubBlockStore.getState() const updatedWorkflowValues = { @@ -343,7 +347,7 @@ export const useWorkflowStore = create()( delete newBlocks[blockId] }) - const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId + const activeWorkflowId = get().currentWorkflowId if (activeWorkflowId) { const subBlockStore = useSubBlockStore.getState() if (subBlockStore.workflowValues[activeWorkflowId]) { @@ -485,6 +489,7 @@ export const useWorkflowStore = create()( clear: () => { const newState = { + currentWorkflowId: get().currentWorkflowId, blocks: {}, edges: [], loops: {}, @@ -502,6 +507,7 @@ export const useWorkflowStore = create()( getWorkflowState: (): WorkflowState => { const state = get() return { + currentWorkflowId: state.currentWorkflowId, blocks: state.blocks, edges: state.edges, loops: state.loops, @@ -539,6 +545,10 @@ export const useWorkflowStore = create()( return { ...state, + currentWorkflowId: + nextState.currentWorkflowId !== undefined + ? 
nextState.currentWorkflowId + : state.currentWorkflowId, blocks: nextBlocks, edges: nextEdges, loops: nextLoops, @@ -613,7 +623,7 @@ export const useWorkflowStore = create()( const newName = getUniqueBlockName(block.name, get().blocks) - const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId + const activeWorkflowId = get().currentWorkflowId const mergedBlock = mergeSubblockState(get().blocks, activeWorkflowId || undefined, id)[id] const newSubBlocks = Object.entries(mergedBlock.subBlocks).reduce( @@ -739,7 +749,7 @@ export const useWorkflowStore = create()( // Update references in subblock store const subBlockStore = useSubBlockStore.getState() - const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId + const activeWorkflowId = get().currentWorkflowId const changedSubblocks: Array<{ blockId: string; subBlockId: string; newValue: any }> = [] if (activeWorkflowId) { @@ -1105,16 +1115,14 @@ export const useWorkflowStore = create()( }, revertToDeployedState: async (deployedState: WorkflowState) => { - const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId + const activeWorkflowId = get().currentWorkflowId if (!activeWorkflowId) { logger.error('Cannot revert: no active workflow ID') return } - const deploymentStatus = useWorkflowRegistry - .getState() - .getWorkflowDeploymentStatus(activeWorkflowId) + const deploymentStatus = get().deploymentStatuses?.[activeWorkflowId] get().replaceWorkflowState({ ...deployedState, diff --git a/apps/sim/stores/workflows/workflow/types.ts b/apps/sim/stores/workflows/workflow/types.ts index edbc606188e..21f22ff5478 100644 --- a/apps/sim/stores/workflows/workflow/types.ts +++ b/apps/sim/stores/workflows/workflow/types.ts @@ -160,6 +160,7 @@ export interface DragStartPosition { } export interface WorkflowState { + currentWorkflowId?: string | null blocks: Record edges: Edge[] lastSaved?: number @@ -239,6 +240,7 @@ export interface WorkflowActions { ) => void setBlockLocked: (id: string, 
locked: boolean) => void batchToggleLocked: (ids: string[]) => void + setCurrentWorkflowId: (workflowId: string | null) => void } export type WorkflowStore = WorkflowState & WorkflowActions diff --git a/apps/sim/tools/attio/assert_record.ts b/apps/sim/tools/attio/assert_record.ts index 3f1414b102c..00864d740a0 100644 --- a/apps/sim/tools/attio/assert_record.ts +++ b/apps/sim/tools/attio/assert_record.ts @@ -49,7 +49,7 @@ export const attioAssertRecordTool: ToolConfig - `https://api.attio.com/v2/objects/${params.objectType}/records?matching_attribute=${params.matchingAttribute}`, + `https://api.attio.com/v2/objects/${params.objectType.trim()}/records?matching_attribute=${params.matchingAttribute.trim()}`, method: 'PUT', headers: (params) => ({ Authorization: `Bearer ${params.accessToken}`, diff --git a/apps/sim/tools/attio/create_list_entry.ts b/apps/sim/tools/attio/create_list_entry.ts index 381e61e1d7f..c194b3407b0 100644 --- a/apps/sim/tools/attio/create_list_entry.ts +++ b/apps/sim/tools/attio/create_list_entry.ts @@ -53,7 +53,7 @@ export const attioCreateListEntryTool: ToolConfig< }, request: { - url: (params) => `https://api.attio.com/v2/lists/${params.list}/entries`, + url: (params) => `https://api.attio.com/v2/lists/${params.list.trim()}/entries`, method: 'POST', headers: (params) => ({ Authorization: `Bearer ${params.accessToken}`, diff --git a/apps/sim/tools/attio/create_record.ts b/apps/sim/tools/attio/create_record.ts index 3589c0072c0..4ec2bb13d33 100644 --- a/apps/sim/tools/attio/create_record.ts +++ b/apps/sim/tools/attio/create_record.ts @@ -39,7 +39,7 @@ export const attioCreateRecordTool: ToolConfig `https://api.attio.com/v2/objects/${params.objectType}/records`, + url: (params) => `https://api.attio.com/v2/objects/${params.objectType.trim()}/records`, method: 'POST', headers: (params) => ({ Authorization: `Bearer ${params.accessToken}`, diff --git a/apps/sim/tools/attio/delete_comment.ts b/apps/sim/tools/attio/delete_comment.ts index 
5fc0aed80b1..e78d71e1b33 100644 --- a/apps/sim/tools/attio/delete_comment.ts +++ b/apps/sim/tools/attio/delete_comment.ts @@ -34,7 +34,7 @@ export const attioDeleteCommentTool: ToolConfig< }, request: { - url: (params) => `https://api.attio.com/v2/comments/${params.commentId}`, + url: (params) => `https://api.attio.com/v2/comments/${params.commentId.trim()}`, method: 'DELETE', headers: (params) => ({ Authorization: `Bearer ${params.accessToken}`, diff --git a/apps/sim/tools/attio/delete_list_entry.ts b/apps/sim/tools/attio/delete_list_entry.ts index 8aede08525d..e46da4d42ee 100644 --- a/apps/sim/tools/attio/delete_list_entry.ts +++ b/apps/sim/tools/attio/delete_list_entry.ts @@ -40,7 +40,8 @@ export const attioDeleteListEntryTool: ToolConfig< }, request: { - url: (params) => `https://api.attio.com/v2/lists/${params.list}/entries/${params.entryId}`, + url: (params) => + `https://api.attio.com/v2/lists/${params.list.trim()}/entries/${params.entryId.trim()}`, method: 'DELETE', headers: (params) => ({ Authorization: `Bearer ${params.accessToken}`, diff --git a/apps/sim/tools/attio/delete_note.ts b/apps/sim/tools/attio/delete_note.ts index 2980801cc66..bd5cfe1a05d 100644 --- a/apps/sim/tools/attio/delete_note.ts +++ b/apps/sim/tools/attio/delete_note.ts @@ -31,7 +31,7 @@ export const attioDeleteNoteTool: ToolConfig `https://api.attio.com/v2/notes/${params.noteId}`, + url: (params) => `https://api.attio.com/v2/notes/${params.noteId.trim()}`, method: 'DELETE', headers: (params) => ({ Authorization: `Bearer ${params.accessToken}`, diff --git a/apps/sim/tools/attio/delete_record.ts b/apps/sim/tools/attio/delete_record.ts index 82da6b68c61..cc28440030e 100644 --- a/apps/sim/tools/attio/delete_record.ts +++ b/apps/sim/tools/attio/delete_record.ts @@ -39,7 +39,7 @@ export const attioDeleteRecordTool: ToolConfig - `https://api.attio.com/v2/objects/${params.objectType}/records/${params.recordId}`, + 
`https://api.attio.com/v2/objects/${params.objectType.trim()}/records/${params.recordId.trim()}`, method: 'DELETE', headers: (params) => ({ Authorization: `Bearer ${params.accessToken}`, diff --git a/apps/sim/tools/attio/delete_task.ts b/apps/sim/tools/attio/delete_task.ts index 02672f6cd3f..088f74707c3 100644 --- a/apps/sim/tools/attio/delete_task.ts +++ b/apps/sim/tools/attio/delete_task.ts @@ -31,7 +31,7 @@ export const attioDeleteTaskTool: ToolConfig `https://api.attio.com/v2/tasks/${params.taskId}`, + url: (params) => `https://api.attio.com/v2/tasks/${params.taskId.trim()}`, method: 'DELETE', headers: (params) => ({ Authorization: `Bearer ${params.accessToken}`, diff --git a/apps/sim/tools/attio/delete_webhook.ts b/apps/sim/tools/attio/delete_webhook.ts index 17f1aa6d978..eaacde34eb6 100644 --- a/apps/sim/tools/attio/delete_webhook.ts +++ b/apps/sim/tools/attio/delete_webhook.ts @@ -34,7 +34,7 @@ export const attioDeleteWebhookTool: ToolConfig< }, request: { - url: (params) => `https://api.attio.com/v2/webhooks/${params.webhookId}`, + url: (params) => `https://api.attio.com/v2/webhooks/${params.webhookId.trim()}`, method: 'DELETE', headers: (params) => ({ Authorization: `Bearer ${params.accessToken}`, diff --git a/apps/sim/tools/attio/get_comment.ts b/apps/sim/tools/attio/get_comment.ts index c32aa84f5aa..3bfa823697c 100644 --- a/apps/sim/tools/attio/get_comment.ts +++ b/apps/sim/tools/attio/get_comment.ts @@ -32,7 +32,7 @@ export const attioGetCommentTool: ToolConfig `https://api.attio.com/v2/comments/${params.commentId}`, + url: (params) => `https://api.attio.com/v2/comments/${params.commentId.trim()}`, method: 'GET', headers: (params) => ({ Authorization: `Bearer ${params.accessToken}`, diff --git a/apps/sim/tools/attio/get_list.ts b/apps/sim/tools/attio/get_list.ts index cf6cbb7b0f4..440b28e81c2 100644 --- a/apps/sim/tools/attio/get_list.ts +++ b/apps/sim/tools/attio/get_list.ts @@ -32,7 +32,7 @@ export const attioGetListTool: ToolConfig 
`https://api.attio.com/v2/lists/${params.list}`, + url: (params) => `https://api.attio.com/v2/lists/${params.list.trim()}`, method: 'GET', headers: (params) => ({ Authorization: `Bearer ${params.accessToken}`, diff --git a/apps/sim/tools/attio/get_list_entry.ts b/apps/sim/tools/attio/get_list_entry.ts index 8047a953e7e..22aa1c41481 100644 --- a/apps/sim/tools/attio/get_list_entry.ts +++ b/apps/sim/tools/attio/get_list_entry.ts @@ -39,7 +39,8 @@ export const attioGetListEntryTool: ToolConfig `https://api.attio.com/v2/lists/${params.list}/entries/${params.entryId}`, + url: (params) => + `https://api.attio.com/v2/lists/${params.list.trim()}/entries/${params.entryId.trim()}`, method: 'GET', headers: (params) => ({ Authorization: `Bearer ${params.accessToken}`, diff --git a/apps/sim/tools/attio/get_member.ts b/apps/sim/tools/attio/get_member.ts index 3469856c5be..11f2c08c07d 100644 --- a/apps/sim/tools/attio/get_member.ts +++ b/apps/sim/tools/attio/get_member.ts @@ -32,7 +32,7 @@ export const attioGetMemberTool: ToolConfig `https://api.attio.com/v2/workspace_members/${params.memberId}`, + url: (params) => `https://api.attio.com/v2/workspace_members/${params.memberId.trim()}`, method: 'GET', headers: (params) => ({ Authorization: `Bearer ${params.accessToken}`, diff --git a/apps/sim/tools/attio/get_note.ts b/apps/sim/tools/attio/get_note.ts index 7637edc40d7..0a9ad4a8e76 100644 --- a/apps/sim/tools/attio/get_note.ts +++ b/apps/sim/tools/attio/get_note.ts @@ -32,7 +32,7 @@ export const attioGetNoteTool: ToolConfig `https://api.attio.com/v2/notes/${params.noteId}`, + url: (params) => `https://api.attio.com/v2/notes/${params.noteId.trim()}`, method: 'GET', headers: (params) => ({ Authorization: `Bearer ${params.accessToken}`, diff --git a/apps/sim/tools/attio/get_object.ts b/apps/sim/tools/attio/get_object.ts index 5f67495dda2..09ecc9c267f 100644 --- a/apps/sim/tools/attio/get_object.ts +++ b/apps/sim/tools/attio/get_object.ts @@ -32,7 +32,7 @@ export const 
attioGetObjectTool: ToolConfig `https://api.attio.com/v2/objects/${params.object}`, + url: (params) => `https://api.attio.com/v2/objects/${params.object.trim()}`, method: 'GET', headers: (params) => ({ Authorization: `Bearer ${params.accessToken}`, diff --git a/apps/sim/tools/attio/get_record.ts b/apps/sim/tools/attio/get_record.ts index 979492c6aed..2ae04ddbad7 100644 --- a/apps/sim/tools/attio/get_record.ts +++ b/apps/sim/tools/attio/get_record.ts @@ -39,7 +39,7 @@ export const attioGetRecordTool: ToolConfig - `https://api.attio.com/v2/objects/${params.objectType}/records/${params.recordId}`, + `https://api.attio.com/v2/objects/${params.objectType.trim()}/records/${params.recordId.trim()}`, method: 'GET', headers: (params) => ({ Authorization: `Bearer ${params.accessToken}`, diff --git a/apps/sim/tools/attio/get_task.ts b/apps/sim/tools/attio/get_task.ts new file mode 100644 index 00000000000..82ac333861c --- /dev/null +++ b/apps/sim/tools/attio/get_task.ts @@ -0,0 +1,78 @@ +import { createLogger } from '@sim/logger' +import type { ToolConfig } from '@/tools/types' +import type { AttioGetTaskParams, AttioGetTaskResponse } from './types' +import { TASK_OUTPUT_PROPERTIES } from './types' + +const logger = createLogger('AttioGetTask') + +export const attioGetTaskTool: ToolConfig = { + id: 'attio_get_task', + name: 'Attio Get Task', + description: 'Get a single task by ID from Attio', + version: '1.0.0', + + oauth: { + required: true, + provider: 'attio', + }, + + params: { + accessToken: { + type: 'string', + required: true, + visibility: 'hidden', + description: 'The OAuth access token for the Attio API', + }, + taskId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'The ID of the task to retrieve', + }, + }, + + request: { + url: (params) => `https://api.attio.com/v2/tasks/${params.taskId.trim()}`, + method: 'GET', + headers: (params) => ({ + Authorization: `Bearer ${params.accessToken}`, + 'Content-Type': 'application/json', + 
}), + }, + + transformResponse: async (response) => { + const data = await response.json() + if (!response.ok) { + logger.error('Attio API request failed', { data, status: response.status }) + throw new Error(data.message || 'Failed to get task') + } + const task = data.data + const linkedRecords = (task.linked_records ?? []).map( + (r: { target_object_id?: string; target_record_id?: string }) => ({ + targetObjectId: r.target_object_id ?? null, + targetRecordId: r.target_record_id ?? null, + }) + ) + const assignees = (task.assignees ?? []).map( + (a: { referenced_actor_type?: string; referenced_actor_id?: string }) => ({ + type: a.referenced_actor_type ?? null, + id: a.referenced_actor_id ?? null, + }) + ) + return { + success: true, + output: { + taskId: task.id?.task_id ?? null, + content: task.content_plaintext ?? null, + deadlineAt: task.deadline_at ?? null, + isCompleted: task.is_completed ?? false, + linkedRecords, + assignees, + createdByActor: task.created_by_actor ?? null, + createdAt: task.created_at ?? 
null, + }, + } + }, + + outputs: TASK_OUTPUT_PROPERTIES, +} diff --git a/apps/sim/tools/attio/get_thread.ts b/apps/sim/tools/attio/get_thread.ts index 83bc0fec3c5..62f0a47f1d7 100644 --- a/apps/sim/tools/attio/get_thread.ts +++ b/apps/sim/tools/attio/get_thread.ts @@ -32,7 +32,7 @@ export const attioGetThreadTool: ToolConfig `https://api.attio.com/v2/threads/${params.threadId}`, + url: (params) => `https://api.attio.com/v2/threads/${params.threadId.trim()}`, method: 'GET', headers: (params) => ({ Authorization: `Bearer ${params.accessToken}`, diff --git a/apps/sim/tools/attio/get_webhook.ts b/apps/sim/tools/attio/get_webhook.ts index 826899187e5..7cc76f9e1dc 100644 --- a/apps/sim/tools/attio/get_webhook.ts +++ b/apps/sim/tools/attio/get_webhook.ts @@ -32,7 +32,7 @@ export const attioGetWebhookTool: ToolConfig `https://api.attio.com/v2/webhooks/${params.webhookId}`, + url: (params) => `https://api.attio.com/v2/webhooks/${params.webhookId.trim()}`, method: 'GET', headers: (params) => ({ Authorization: `Bearer ${params.accessToken}`, diff --git a/apps/sim/tools/attio/index.ts b/apps/sim/tools/attio/index.ts index 7212fc424af..978993b87de 100644 --- a/apps/sim/tools/attio/index.ts +++ b/apps/sim/tools/attio/index.ts @@ -20,6 +20,7 @@ export { attioGetMemberTool } from './get_member' export { attioGetNoteTool } from './get_note' export { attioGetObjectTool } from './get_object' export { attioGetRecordTool } from './get_record' +export { attioGetTaskTool } from './get_task' export { attioGetThreadTool } from './get_thread' export { attioGetWebhookTool } from './get_webhook' export { attioListListsTool } from './list_lists' diff --git a/apps/sim/tools/attio/list_records.ts b/apps/sim/tools/attio/list_records.ts index 7fae18d4437..39cfef28349 100644 --- a/apps/sim/tools/attio/list_records.ts +++ b/apps/sim/tools/attio/list_records.ts @@ -56,7 +56,7 @@ export const attioListRecordsTool: ToolConfig `https://api.attio.com/v2/objects/${params.objectType}/records/query`, + url: 
(params) => `https://api.attio.com/v2/objects/${params.objectType.trim()}/records/query`, method: 'POST', headers: (params) => ({ Authorization: `Bearer ${params.accessToken}`, diff --git a/apps/sim/tools/attio/query_list_entries.ts b/apps/sim/tools/attio/query_list_entries.ts index d234c2aec37..6b0574d88b9 100644 --- a/apps/sim/tools/attio/query_list_entries.ts +++ b/apps/sim/tools/attio/query_list_entries.ts @@ -60,7 +60,7 @@ export const attioQueryListEntriesTool: ToolConfig< }, request: { - url: (params) => `https://api.attio.com/v2/lists/${params.list}/entries/query`, + url: (params) => `https://api.attio.com/v2/lists/${params.list.trim()}/entries/query`, method: 'POST', headers: (params) => ({ Authorization: `Bearer ${params.accessToken}`, diff --git a/apps/sim/tools/attio/types.ts b/apps/sim/tools/attio/types.ts index cb7c7264262..0f2bb2a9c22 100644 --- a/apps/sim/tools/attio/types.ts +++ b/apps/sim/tools/attio/types.ts @@ -529,6 +529,26 @@ export interface AttioUpdateTaskResponse extends ToolResponse { } } +/** Params for getting a single task */ +export interface AttioGetTaskParams { + accessToken: string + taskId: string +} + +/** Response for getting a single task */ +export interface AttioGetTaskResponse extends ToolResponse { + output: { + taskId: string | null + content: string | null + deadlineAt: string | null + isCompleted: boolean + linkedRecords: Array<{ targetObjectId: string | null; targetRecordId: string | null }> + assignees: Array<{ type: string | null; id: string | null }> + createdByActor: unknown + createdAt: string | null + } +} + /** Response for deleting a task */ export interface AttioDeleteTaskResponse extends ToolResponse { output: { @@ -1093,6 +1113,7 @@ export type AttioResponse = | AttioListTasksResponse | AttioCreateTaskResponse | AttioUpdateTaskResponse + | AttioGetTaskResponse | AttioDeleteTaskResponse | AttioListObjectsResponse | AttioGetObjectResponse diff --git a/apps/sim/tools/attio/update_list.ts 
b/apps/sim/tools/attio/update_list.ts index e54176a128a..e36f74b2ece 100644 --- a/apps/sim/tools/attio/update_list.ts +++ b/apps/sim/tools/attio/update_list.ts @@ -58,7 +58,7 @@ export const attioUpdateListTool: ToolConfig `https://api.attio.com/v2/lists/${params.list}`, + url: (params) => `https://api.attio.com/v2/lists/${params.list.trim()}`, method: 'PATCH', headers: (params) => ({ Authorization: `Bearer ${params.accessToken}`, diff --git a/apps/sim/tools/attio/update_list_entry.ts b/apps/sim/tools/attio/update_list_entry.ts index d6da7dc01e1..760e960cdfc 100644 --- a/apps/sim/tools/attio/update_list_entry.ts +++ b/apps/sim/tools/attio/update_list_entry.ts @@ -47,7 +47,8 @@ export const attioUpdateListEntryTool: ToolConfig< }, request: { - url: (params) => `https://api.attio.com/v2/lists/${params.list}/entries/${params.entryId}`, + url: (params) => + `https://api.attio.com/v2/lists/${params.list.trim()}/entries/${params.entryId.trim()}`, method: 'PATCH', headers: (params) => ({ Authorization: `Bearer ${params.accessToken}`, diff --git a/apps/sim/tools/attio/update_object.ts b/apps/sim/tools/attio/update_object.ts index 1b8c0024a9a..0136bae05dd 100644 --- a/apps/sim/tools/attio/update_object.ts +++ b/apps/sim/tools/attio/update_object.ts @@ -51,7 +51,7 @@ export const attioUpdateObjectTool: ToolConfig `https://api.attio.com/v2/objects/${params.object}`, + url: (params) => `https://api.attio.com/v2/objects/${params.object.trim()}`, method: 'PATCH', headers: (params) => ({ Authorization: `Bearer ${params.accessToken}`, diff --git a/apps/sim/tools/attio/update_record.ts b/apps/sim/tools/attio/update_record.ts index cf74d9a3b78..21d613f9292 100644 --- a/apps/sim/tools/attio/update_record.ts +++ b/apps/sim/tools/attio/update_record.ts @@ -46,7 +46,7 @@ export const attioUpdateRecordTool: ToolConfig - `https://api.attio.com/v2/objects/${params.objectType}/records/${params.recordId}`, + 
`https://api.attio.com/v2/objects/${params.objectType.trim()}/records/${params.recordId.trim()}`, method: 'PATCH', headers: (params) => ({ Authorization: `Bearer ${params.accessToken}`, diff --git a/apps/sim/tools/attio/update_task.ts b/apps/sim/tools/attio/update_task.ts index 9b1e4e21e82..e55394b44ce 100644 --- a/apps/sim/tools/attio/update_task.ts +++ b/apps/sim/tools/attio/update_task.ts @@ -56,7 +56,7 @@ export const attioUpdateTaskTool: ToolConfig `https://api.attio.com/v2/tasks/${params.taskId}`, + url: (params) => `https://api.attio.com/v2/tasks/${params.taskId.trim()}`, method: 'PATCH', headers: (params) => ({ Authorization: `Bearer ${params.accessToken}`, diff --git a/apps/sim/tools/attio/update_webhook.ts b/apps/sim/tools/attio/update_webhook.ts index 662691c4fde..659bb8f9648 100644 --- a/apps/sim/tools/attio/update_webhook.ts +++ b/apps/sim/tools/attio/update_webhook.ts @@ -34,41 +34,38 @@ export const attioUpdateWebhookTool: ToolConfig< }, targetUrl: { type: 'string', - required: true, + required: false, visibility: 'user-or-llm', description: 'HTTPS target URL for webhook delivery', }, subscriptions: { type: 'string', - required: true, + required: false, visibility: 'user-or-llm', description: 'JSON array of subscriptions, e.g. [{"event_type":"note.created"}]', }, }, request: { - url: (params) => `https://api.attio.com/v2/webhooks/${params.webhookId}`, + url: (params) => `https://api.attio.com/v2/webhooks/${params.webhookId.trim()}`, method: 'PATCH', headers: (params) => ({ Authorization: `Bearer ${params.accessToken}`, 'Content-Type': 'application/json', }), body: (params) => { - let subscriptions: unknown = [] + const data: Record = {} + if (params.targetUrl) data.target_url = params.targetUrl.trim() if (params.subscriptions) { try { - subscriptions = + data.subscriptions = typeof params.subscriptions === 'string' ? 
JSON.parse(params.subscriptions) : params.subscriptions } catch { - subscriptions = [] + data.subscriptions = [] } } - const data: Record = { - target_url: params.targetUrl, - subscriptions, - } return { data } }, }, diff --git a/apps/sim/tools/extend/index.ts b/apps/sim/tools/extend/index.ts new file mode 100644 index 00000000000..cf20cf8daed --- /dev/null +++ b/apps/sim/tools/extend/index.ts @@ -0,0 +1 @@ +export { extendParserTool, extendParserV2Tool } from '@/tools/extend/parser' diff --git a/apps/sim/tools/extend/parser.ts b/apps/sim/tools/extend/parser.ts new file mode 100644 index 00000000000..4e7dab956b5 --- /dev/null +++ b/apps/sim/tools/extend/parser.ts @@ -0,0 +1,250 @@ +import { isInternalFileUrl } from '@/lib/uploads/utils/file-utils' +import type { + ExtendParserInput, + ExtendParserOutput, + ExtendParserV2Input, +} from '@/tools/extend/types' +import type { ToolConfig } from '@/tools/types' + +export const extendParserTool: ToolConfig = { + id: 'extend_parser', + name: 'Extend Document Parser', + description: 'Parse and extract content from documents using Extend AI', + version: '1.0.0', + + params: { + filePath: { + type: 'string', + required: false, + visibility: 'user-only', + description: 'URL to a document to be processed', + }, + file: { + type: 'file', + required: false, + visibility: 'user-only', + description: 'Document file to be processed', + }, + fileUpload: { + type: 'object', + required: false, + visibility: 'hidden', + description: 'File upload data from file-upload component', + }, + outputFormat: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Target output format (markdown or spatial). Defaults to markdown.', + }, + chunking: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Chunking strategy (page, document, or section). 
Defaults to page.', + }, + engine: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: + 'Parsing engine (parse_performance or parse_light). Defaults to parse_performance.', + }, + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Extend API key', + }, + }, + + request: { + url: '/api/tools/extend/parse', + method: 'POST', + headers: (params) => ({ + 'Content-Type': 'application/json', + Accept: 'application/json', + Authorization: `Bearer ${params.apiKey}`, + }), + body: (params) => { + if (!params || typeof params !== 'object') { + throw new Error('Invalid parameters: Parameters must be provided as an object') + } + + if (!params.apiKey || typeof params.apiKey !== 'string' || params.apiKey.trim() === '') { + throw new Error('Missing or invalid API key: A valid Extend API key is required') + } + + const requestBody: Record = { + apiKey: params.apiKey, + } + + const fileInput = + params.file && typeof params.file === 'object' ? params.file : params.fileUpload + const hasFileUpload = fileInput && typeof fileInput === 'object' + const hasFilePath = + typeof params.filePath === 'string' && + params.filePath !== 'null' && + params.filePath.trim() !== '' + + if (hasFilePath) { + const filePathToValidate = params.filePath!.trim() + + if (filePathToValidate.startsWith('/')) { + if (!isInternalFileUrl(filePathToValidate)) { + throw new Error( + 'Invalid file path. Only uploaded files are supported for internal paths.' + ) + } + requestBody.filePath = filePathToValidate + } else { + let url + try { + url = new URL(filePathToValidate) + + if (!['http:', 'https:'].includes(url.protocol)) { + throw new Error( + `Invalid protocol: ${url.protocol}. URL must use HTTP or HTTPS protocol` + ) + } + } catch (error) { + const errorMessage = error instanceof Error ? error.message : String(error) + throw new Error( + `Invalid URL format: ${errorMessage}. 
Please provide a valid HTTP or HTTPS URL to a document.` + ) + } + + requestBody.filePath = url.toString() + } + } else if (hasFileUpload) { + requestBody.file = fileInput + } else { + throw new Error('Missing file input: Please provide a document URL or upload a file') + } + + if (params.outputFormat && ['markdown', 'spatial'].includes(params.outputFormat)) { + requestBody.outputFormat = params.outputFormat + } + + if (params.chunking && ['page', 'document', 'section'].includes(params.chunking)) { + requestBody.chunking = params.chunking + } + + if (params.engine && ['parse_performance', 'parse_light'].includes(params.engine)) { + requestBody.engine = params.engine + } + + return requestBody + }, + }, + + transformResponse: async (response) => { + const data = await response.json() + + if (!data || typeof data !== 'object') { + throw new Error('Invalid response format from Extend API') + } + + const extendData = data.output ?? data + + return { + success: true, + output: { + id: extendData.id ?? null, + status: extendData.status ?? null, + chunks: extendData.chunks ?? [], + blocks: extendData.blocks ?? [], + pageCount: extendData.pageCount ?? extendData.page_count ?? null, + creditsUsed: extendData.creditsUsed ?? extendData.credits_used ?? 
null, + }, + } + }, + + outputs: { + id: { type: 'string', description: 'Unique identifier for the parser run' }, + status: { type: 'string', description: 'Processing status' }, + chunks: { + type: 'json', + description: 'Parsed document content chunks', + }, + blocks: { + type: 'json', + description: 'Block-level document elements with type and content', + }, + pageCount: { + type: 'number', + description: 'Number of pages processed', + optional: true, + }, + creditsUsed: { + type: 'number', + description: 'API credits consumed', + optional: true, + }, + }, +} + +export const extendParserV2Tool: ToolConfig = { + ...extendParserTool, + id: 'extend_parser_v2', + name: 'Extend Document Parser', + postProcess: undefined, + directExecution: undefined, + transformResponse: extendParserTool.transformResponse + ? (response: Response, params?: ExtendParserV2Input) => + extendParserTool.transformResponse!(response, params as unknown as ExtendParserInput) + : undefined, + params: { + file: { + type: 'file', + required: true, + visibility: 'user-only', + description: 'Document to be processed', + }, + outputFormat: extendParserTool.params.outputFormat, + chunking: extendParserTool.params.chunking, + engine: extendParserTool.params.engine, + apiKey: extendParserTool.params.apiKey, + }, + request: { + url: '/api/tools/extend/parse', + method: 'POST', + headers: (params) => ({ + 'Content-Type': 'application/json', + Accept: 'application/json', + Authorization: `Bearer ${params.apiKey}`, + }), + body: (params: ExtendParserV2Input) => { + if (!params || typeof params !== 'object') { + throw new Error('Invalid parameters: Parameters must be provided as an object') + } + + if (!params.apiKey || typeof params.apiKey !== 'string' || params.apiKey.trim() === '') { + throw new Error('Missing or invalid API key: A valid Extend API key is required') + } + + if (!params.file || typeof params.file !== 'object') { + throw new Error('Missing or invalid file: Please provide a file object') + } 
+ + const requestBody: Record = { + apiKey: params.apiKey, + file: params.file, + } + + if (params.outputFormat && ['markdown', 'spatial'].includes(params.outputFormat)) { + requestBody.outputFormat = params.outputFormat + } + + if (params.chunking && ['page', 'document', 'section'].includes(params.chunking)) { + requestBody.chunking = params.chunking + } + + if (params.engine && ['parse_performance', 'parse_light'].includes(params.engine)) { + requestBody.engine = params.engine + } + + return requestBody + }, + }, +} diff --git a/apps/sim/tools/extend/types.ts b/apps/sim/tools/extend/types.ts new file mode 100644 index 00000000000..dd65c126e09 --- /dev/null +++ b/apps/sim/tools/extend/types.ts @@ -0,0 +1,89 @@ +import type { RawFileInput } from '@/lib/uploads/utils/file-utils' +import type { UserFile } from '@/executor/types' +import type { ToolResponse } from '@/tools/types' + +/** + * Input parameters for the Extend parser tool + */ +export interface ExtendParserInput { + /** URL to a document to be processed */ + filePath?: string + + file?: RawFileInput + + /** File upload data (from file-upload component) */ + fileUpload?: RawFileInput + + /** Extend API key for authentication */ + apiKey: string + + /** Target output format */ + outputFormat?: 'markdown' | 'spatial' + + /** Chunking strategy */ + chunking?: 'page' | 'document' | 'section' + + /** Parsing engine */ + engine?: 'parse_performance' | 'parse_light' +} + +export interface ExtendParserV2Input { + /** File to be processed */ + file: UserFile + + /** Extend API key for authentication */ + apiKey: string + + /** Target output format */ + outputFormat?: 'markdown' | 'spatial' + + /** Chunking strategy */ + chunking?: 'page' | 'document' | 'section' + + /** Parsing engine */ + engine?: 'parse_performance' | 'parse_light' +} + +/** + * Chunk from parsed document + */ +export interface ExtendParseChunk { + content: string + page?: number + metadata?: Record +} + +/** + * Block-level element from parsed 
document + */ +export interface ExtendParseBlock { + type: string + content: string + bbox?: { + left: number + top: number + width: number + height: number + page: number + } + metadata?: Record +} + +/** + * Native Extend API response structure for parsing + */ +export interface ExtendParserOutputData { + id: string + status: string + chunks: ExtendParseChunk[] + blocks: ExtendParseBlock[] + pageCount: number | null + creditsUsed: number | null +} + +/** + * Complete response from the Extend parser tool + */ +export interface ExtendParserOutput extends ToolResponse { + output: ExtendParserOutputData +} diff --git a/apps/sim/tools/index.test.ts b/apps/sim/tools/index.test.ts index c5beca880b7..a55aefa1ee2 100644 --- a/apps/sim/tools/index.test.ts +++ b/apps/sim/tools/index.test.ts @@ -16,16 +16,19 @@ import { import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' // Hoisted mock state - these are available to vi.mock factories -const { mockIsHosted, mockEnv, mockGetBYOKKey, mockRateLimiterFns } = vi.hoisted(() => ({ - mockIsHosted: { value: false }, - mockEnv: { NEXT_PUBLIC_APP_URL: 'http://localhost:3000' } as Record, - mockGetBYOKKey: vi.fn(), - mockRateLimiterFns: { - acquireKey: vi.fn(), - preConsumeCapacity: vi.fn(), - consumeCapacity: vi.fn(), - }, -})) +const { mockIsHosted, mockEnv, mockGetBYOKKey, mockGetToolAsync, mockRateLimiterFns } = vi.hoisted( + () => ({ + mockIsHosted: { value: false }, + mockEnv: { NEXT_PUBLIC_APP_URL: 'http://localhost:3000' } as Record, + mockGetBYOKKey: vi.fn(), + mockGetToolAsync: vi.fn(), + mockRateLimiterFns: { + acquireKey: vi.fn(), + preConsumeCapacity: vi.fn(), + consumeCapacity: vi.fn(), + }, + }) +) // Mock feature flags vi.mock('@/lib/core/config/feature-flags', () => ({ @@ -176,27 +179,12 @@ vi.mock('@/tools/registry', () => { params: {}, request: { url: '/api/tools/serper/search', method: 'GET' }, }, - 'custom_custom-tool-123': { - id: 'custom_custom-tool-123', - name: 'Custom Weather Tool', - 
description: 'Get weather information', - version: '1.0.0', - params: { - location: { type: 'string', required: true, description: 'City name' }, - unit: { type: 'string', required: false, description: 'Unit (metric/imperial)' }, - }, - request: { - url: '/api/function/execute', - method: 'POST', - headers: () => ({ 'Content-Type': 'application/json' }), - }, - }, } return { tools: mockTools } }) // Mock custom tools - define mock data inside factory function -vi.mock('@/hooks/queries/custom-tools', () => { +vi.mock('@/hooks/queries/utils/custom-tool-cache', () => { const mockCustomTool = { id: 'custom-tool-123', title: 'Custom Weather Tool', @@ -226,9 +214,19 @@ vi.mock('@/hooks/queries/custom-tools', () => { } }) -import { executeTool } from '@/tools' +vi.mock('@/tools/utils.server', async (importOriginal) => { + const actual = await importOriginal() + mockGetToolAsync.mockImplementation(actual.getToolAsync) + return { + ...actual, + getToolAsync: mockGetToolAsync, + } +}) + +import { executeTool, postProcessToolOutput } from '@/tools' import { tools } from '@/tools/registry' import { getTool } from '@/tools/utils' +import { getToolAsync } from '@/tools/utils.server' /** * Sets up global fetch mock with Next.js preconnect support. 
@@ -304,18 +302,45 @@ describe('Tools Registry', () => { }) describe('Custom Tools', () => { - it('should get custom tool by ID', () => { - const customTool = getTool('custom_custom-tool-123') - expect(customTool).toBeDefined() - expect(customTool?.name).toBe('Custom Weather Tool') - expect(customTool?.description).toBe('Get weather information') - expect(customTool?.params.location).toBeDefined() - expect(customTool?.params.location.required).toBe(true) + it('does not resolve custom tools through the synchronous client helper', () => { + expect(getTool('custom_remote-tool-123', 'workspace-1')).toBeUndefined() }) - it('should handle non-existent custom tool', () => { - const nonExistentTool = getTool('custom_non-existent') - expect(nonExistentTool).toBeUndefined() + it('resolves custom tools through the async helper', async () => { + setupFetchMock({ + json: { + data: [ + { + id: 'remote-tool-123', + title: 'Custom Weather Tool', + schema: { + function: { + name: 'weather_tool', + description: 'Get weather information', + parameters: { + type: 'object', + properties: { + location: { type: 'string', description: 'City name' }, + }, + required: ['location'], + }, + }, + }, + }, + ], + }, + status: 200, + headers: { 'content-type': 'application/json' }, + }) + + const customTool = await getToolAsync('custom_remote-tool-123', { + workflowId: 'workflow-1', + userId: 'user-1', + workspaceId: 'workspace-1', + }) + + expect(customTool?.name).toBe('Custom Weather Tool') + expect(customTool?.params.location.required).toBe(true) }) }) @@ -1962,44 +1987,17 @@ describe('stripInternalFields Safety', () => { }) it('should preserve __-prefixed fields in custom tool output', async () => { - const mockTool = { - id: 'custom_test-preserve-dunder', - name: 'Custom Preserve Dunder', - description: 'A custom tool whose output has __ fields', - version: '1.0.0', - params: {}, - request: { - url: '/api/function/execute', - method: 'POST' as const, - headers: () => ({ 'Content-Type': 
'application/json' }), - }, - transformResponse: vi.fn().mockResolvedValue({ - success: true, - output: { result: 'ok', __metadata: { source: 'user' }, __tag: 'important' }, - }), - } - - const originalTools = { ...tools } - ;(tools as any)['custom_test-preserve-dunder'] = mockTool - - global.fetch = Object.assign( - vi.fn().mockImplementation(async () => ({ - ok: true, - status: 200, - headers: new Headers(), - json: () => Promise.resolve({ success: true }), - })), - { preconnect: vi.fn() } - ) as typeof fetch - - const result = await executeTool('custom_test-preserve-dunder', {}, true) - - expect(result.success).toBe(true) - expect(result.output.result).toBe('ok') - expect(result.output.__metadata).toEqual({ source: 'user' }) - expect(result.output.__tag).toBe('important') + const output = postProcessToolOutput('custom_test-preserve-dunder', { + result: 'ok', + __metadata: { source: 'user' }, + __tag: 'important', + }) - Object.assign(tools, originalTools) + expect(output).toEqual({ + result: 'ok', + __metadata: { source: 'user' }, + __tag: 'important', + }) }) }) diff --git a/apps/sim/tools/index.ts b/apps/sim/tools/index.ts index c354b691c2b..290960873a1 100644 --- a/apps/sim/tools/index.ts +++ b/apps/sim/tools/index.ts @@ -26,15 +26,40 @@ import type { ToolResponse, ToolRetryConfig, } from '@/tools/types' -import { - formatRequestParams, - getTool, - getToolAsync, - validateRequiredParametersAfterMerge, -} from '@/tools/utils' +import { formatRequestParams, getTool, validateRequiredParametersAfterMerge } from '@/tools/utils' +import * as toolsUtilsServer from '@/tools/utils.server' const logger = createLogger('Tools') +interface ToolExecutionScope { + workspaceId?: string + workflowId?: string + userId?: string + executionId?: string + callChain?: string[] + isDeployedContext?: boolean + enforceCredentialAccess?: boolean +} + +function resolveToolScope( + params: Record, + executionContext?: ExecutionContext +): ToolExecutionScope { + const ctx = 
params._context as Record | undefined + return { + workspaceId: (executionContext?.workspaceId ?? ctx?.workspaceId) as string | undefined, + workflowId: (executionContext?.workflowId ?? ctx?.workflowId) as string | undefined, + userId: (executionContext?.userId ?? ctx?.userId) as string | undefined, + executionId: (executionContext?.executionId ?? ctx?.executionId) as string | undefined, + callChain: (executionContext?.callChain ?? ctx?.callChain) as string[] | undefined, + isDeployedContext: (executionContext?.isDeployedContext ?? ctx?.isDeployedContext) as + | boolean + | undefined, + enforceCredentialAccess: (executionContext?.enforceCredentialAccess ?? + ctx?.enforceCredentialAccess) as boolean | undefined, + } +} + /** Result from hosted key injection */ interface HostedKeyInjectionResult { isUsingHostedKey: boolean @@ -57,11 +82,7 @@ async function injectHostedKeyIfNeeded( const { envKeyPrefix, apiKeyParam, byokProviderId, rateLimit } = tool.hosting - // Derive workspace/user/workflow IDs from executionContext or params._context - const ctx = params._context as Record | undefined - const workspaceId = executionContext?.workspaceId || (ctx?.workspaceId as string | undefined) - const userId = executionContext?.userId || (ctx?.userId as string | undefined) - const workflowId = executionContext?.workflowId || (ctx?.workflowId as string | undefined) + const { workspaceId, userId, workflowId } = resolveToolScope(params, executionContext) // Check BYOK workspace key first if (byokProviderId && workspaceId) { @@ -277,10 +298,7 @@ async function processHostedKeyCost( if (cost <= 0) return { cost: 0 } - const ctx = params._context as Record | undefined - const userId = executionContext?.userId || (ctx?.userId as string | undefined) - const wsId = executionContext?.workspaceId || (ctx?.workspaceId as string | undefined) - const wfId = executionContext?.workflowId || (ctx?.workflowId as string | undefined) + const { userId } = resolveToolScope(params, executionContext) 
if (!userId) return { cost, metadata } @@ -305,8 +323,7 @@ async function reportCustomDimensionUsage( requestId: string ): Promise { if (tool.hosting?.rateLimit.mode !== 'custom') return - const ctx = params._context as Record | undefined - const billingActorId = executionContext?.workspaceId || (ctx?.workspaceId as string | undefined) + const { workspaceId: billingActorId } = resolveToolScope(params, executionContext) if (!billingActorId) return const rateLimiter = getHostedKeyRateLimiter() @@ -353,6 +370,10 @@ function stripInternalFields(output: Record): Record) { + return isCustomTool(toolId) ? output : stripInternalFields(output) +} + /** * Apply post-execution hosted-key cost tracking to a successful tool result. * Reports custom dimension usage, calculates cost, and merges it into the output. @@ -599,18 +620,19 @@ export async function executeTool( // Normalize tool ID to strip resource suffixes (e.g., workflow_executor_ -> workflow_executor) const normalizedToolId = normalizeToolId(toolId) + const scope = resolveToolScope(params, executionContext) + // Handle load_skill tool for agent skills progressive disclosure if (normalizedToolId === 'load_skill') { const skillName = params.skill_name - const workspaceId = params._context?.workspaceId - if (!skillName || !workspaceId) { + if (!skillName || !scope.workspaceId) { return { success: false, output: { error: 'Missing skill_name or workspace context' }, error: 'Missing skill_name or workspace context', } } - const content = await resolveSkillContent(skillName, workspaceId) + const content = await resolveSkillContent(skillName, scope.workspaceId) if (!content) { return { success: false, @@ -624,11 +646,13 @@ export async function executeTool( } } - // If it's a custom tool, use the async version with workflowId + // If it's a custom tool, use the async version if (isCustomTool(normalizedToolId)) { - const workflowId = params._context?.workflowId - const userId = params._context?.userId - tool = await 
getToolAsync(normalizedToolId, workflowId, userId) + tool = await toolsUtilsServer.getToolAsync(normalizedToolId, { + workflowId: scope.workflowId, + userId: scope.userId, + workspaceId: scope.workspaceId, + }) if (!tool) { logger.error(`[${requestId}] Custom tool not found: ${normalizedToolId}`) } @@ -799,9 +823,7 @@ export async function executeTool( ) } - const strippedOutput = isCustomTool(normalizedToolId) - ? finalResult.output - : stripInternalFields(finalResult.output ?? {}) + const strippedOutput = postProcessToolOutput(normalizedToolId, finalResult.output ?? {}) return { ...finalResult, @@ -856,9 +878,7 @@ export async function executeTool( ) } - const strippedOutput = isCustomTool(normalizedToolId) - ? finalResult.output - : stripInternalFields(finalResult.output ?? {}) + const strippedOutput = postProcessToolOutput(normalizedToolId, finalResult.output ?? {}) return { ...finalResult, @@ -1556,17 +1576,13 @@ async function executeMcpTool( ) } - const workspaceId = params._context?.workspaceId || executionContext?.workspaceId - const workflowId = params._context?.workflowId || executionContext?.workflowId - const userId = params._context?.userId || executionContext?.userId - const callChain = - (params._context?.callChain as string[] | undefined) || executionContext?.callChain + const mcpScope = resolveToolScope(params, executionContext) - if (callChain && callChain.length > 0) { - headers[SIM_VIA_HEADER] = serializeCallChain(callChain) + if (mcpScope.callChain && mcpScope.callChain.length > 0) { + headers[SIM_VIA_HEADER] = serializeCallChain(mcpScope.callChain) } - if (!workspaceId) { + if (!mcpScope.workspaceId) { return { success: false, output: {}, @@ -1586,8 +1602,8 @@ async function executeMcpTool( serverId, toolName, arguments: toolArguments, - workflowId, // Pass workflow context for user resolution - workspaceId, // Pass workspace context for scoping + workflowId: mcpScope.workflowId, + workspaceId: mcpScope.workspaceId, } // Include schema to 
skip discovery on execution @@ -1601,14 +1617,14 @@ async function executeMcpTool( validateRequestBodySize(body, actualRequestId, `mcp:${toolId}`) logger.info(`[${actualRequestId}] Making MCP tool request to ${toolName} on ${serverId}`, { - hasWorkspaceId: !!workspaceId, - hasWorkflowId: !!workflowId, + hasWorkspaceId: !!mcpScope.workspaceId, + hasWorkflowId: !!mcpScope.workflowId, hasToolSchema: !!toolSchema, }) const mcpUrl = new URL('/api/mcp/tools/execute', baseUrl) - if (userId) { - mcpUrl.searchParams.set('userId', userId) + if (mcpScope.userId) { + mcpUrl.searchParams.set('userId', mcpScope.userId) } const response = await fetch(mcpUrl.toString(), { diff --git a/apps/sim/tools/knowledge/create_document.ts b/apps/sim/tools/knowledge/create_document.ts index e209a0e9bd2..feaf8ac3960 100644 --- a/apps/sim/tools/knowledge/create_document.ts +++ b/apps/sim/tools/knowledge/create_document.ts @@ -103,9 +103,6 @@ export const knowledgeCreateDocumentTool: ToolConfig = { + id: 'launchdarkly_create_flag', + name: 'LaunchDarkly Create Flag', + description: 'Create a new feature flag in a LaunchDarkly project.', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'LaunchDarkly API key', + }, + projectKey: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'The project key to create the flag in', + }, + name: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Human-readable name for the feature flag', + }, + key: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Unique key for the feature flag (used in code)', + }, + description: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Description of the feature flag', + }, + tags: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Comma-separated list of tags', + }, + temporary: { + type: 
'boolean', + required: false, + visibility: 'user-or-llm', + description: 'Whether the flag is temporary (default true)', + }, + }, + + request: { + url: (params) => + `https://app.launchdarkly.com/api/v2/flags/${encodeURIComponent(params.projectKey.trim())}`, + method: 'POST', + headers: (params) => ({ + Authorization: params.apiKey.trim(), + 'Content-Type': 'application/json', + }), + body: (params) => { + const body: Record = { + name: params.name, + key: params.key, + } + if (params.description) body.description = params.description + if (params.tags) body.tags = params.tags.split(',').map((t) => t.trim()) + if (params.temporary !== undefined) body.temporary = params.temporary + return body + }, + }, + + transformResponse: async (response: Response) => { + if (!response.ok) { + const error = await response.json().catch(() => ({ message: response.statusText })) + return { + success: false, + output: { + key: '', + name: '', + kind: '', + description: null, + temporary: false, + archived: false, + deprecated: false, + creationDate: 0, + tags: [], + variations: [], + maintainerId: null, + }, + error: error.message, + } + } + + const data = await response.json() + return { + success: true, + output: { + key: data.key ?? null, + name: data.name ?? null, + kind: data.kind ?? null, + description: data.description ?? null, + temporary: data.temporary ?? false, + archived: data.archived ?? false, + deprecated: data.deprecated ?? false, + creationDate: data.creationDate ?? null, + tags: data.tags ?? [], + variations: data.variations ?? [], + maintainerId: data.maintainerId ?? 
null, + }, + } + }, + + outputs: FLAG_OUTPUT_PROPERTIES, +} diff --git a/apps/sim/tools/launchdarkly/delete_flag.ts b/apps/sim/tools/launchdarkly/delete_flag.ts new file mode 100644 index 00000000000..2ec772ecb03 --- /dev/null +++ b/apps/sim/tools/launchdarkly/delete_flag.ts @@ -0,0 +1,61 @@ +import type { + LaunchDarklyDeleteFlagParams, + LaunchDarklyDeleteFlagResponse, +} from '@/tools/launchdarkly/types' +import type { ToolConfig } from '@/tools/types' + +export const launchDarklyDeleteFlagTool: ToolConfig< + LaunchDarklyDeleteFlagParams, + LaunchDarklyDeleteFlagResponse +> = { + id: 'launchdarkly_delete_flag', + name: 'LaunchDarkly Delete Flag', + description: 'Delete a feature flag from a LaunchDarkly project.', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'LaunchDarkly API key', + }, + projectKey: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'The project key', + }, + flagKey: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'The feature flag key to delete', + }, + }, + + request: { + url: (params) => + `https://app.launchdarkly.com/api/v2/flags/${encodeURIComponent(params.projectKey.trim())}/${encodeURIComponent(params.flagKey.trim())}`, + method: 'DELETE', + headers: (params) => ({ + Authorization: params.apiKey.trim(), + }), + }, + + transformResponse: async (response: Response) => { + if (!response.ok) { + const error = await response.json().catch(() => ({ message: response.statusText })) + return { success: false, output: { deleted: false }, error: error.message } + } + + return { + success: true, + output: { deleted: true }, + } + }, + + outputs: { + deleted: { type: 'boolean', description: 'Whether the flag was successfully deleted' }, + }, +} diff --git a/apps/sim/tools/launchdarkly/get_audit_log.ts b/apps/sim/tools/launchdarkly/get_audit_log.ts new file mode 100644 index 00000000000..0c31ebd37b7 --- 
/dev/null +++ b/apps/sim/tools/launchdarkly/get_audit_log.ts @@ -0,0 +1,95 @@ +import type { + LaunchDarklyGetAuditLogParams, + LaunchDarklyGetAuditLogResponse, +} from '@/tools/launchdarkly/types' +import { AUDIT_LOG_ENTRY_OUTPUT_PROPERTIES } from '@/tools/launchdarkly/types' +import type { ToolConfig } from '@/tools/types' + +export const launchDarklyGetAuditLogTool: ToolConfig< + LaunchDarklyGetAuditLogParams, + LaunchDarklyGetAuditLogResponse +> = { + id: 'launchdarkly_get_audit_log', + name: 'LaunchDarkly Get Audit Log', + description: 'List audit log entries from your LaunchDarkly account.', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'LaunchDarkly API key', + }, + limit: { + type: 'number', + required: false, + visibility: 'user-or-llm', + description: 'Maximum number of entries to return (default 10, max 20)', + }, + spec: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Filter expression (e.g., "resourceType:flag")', + }, + }, + + request: { + url: (params) => { + const queryParams = new URLSearchParams() + if (params.limit) queryParams.set('limit', String(params.limit)) + if (params.spec) queryParams.set('spec', params.spec) + const qs = queryParams.toString() + return `https://app.launchdarkly.com/api/v2/auditlog${qs ? `?${qs}` : ''}` + }, + method: 'GET', + headers: (params) => ({ + Authorization: params.apiKey.trim(), + }), + }, + + transformResponse: async (response: Response) => { + if (!response.ok) { + const error = await response.json().catch(() => ({ message: response.statusText })) + return { success: false, output: { entries: [], totalCount: 0 }, error: error.message } + } + + const data = await response.json() + const entries = (data.items ?? []).map((item: Record) => { + const member = item.member as Record | undefined + const target = item.target as Record | undefined + return { + id: (item._id as string) ?? 
null, + date: item.date ?? null, + kind: item.kind ?? null, + name: item.name ?? null, + description: item.description ?? null, + shortDescription: item.shortDescription ?? null, + memberEmail: member?.email ?? null, + targetName: target?.name ?? null, + targetKind: (target?.resources as string[] | undefined)?.[0] ?? null, + } + }) + + return { + success: true, + output: { + entries, + totalCount: (data.totalCount as number) ?? entries.length, + }, + } + }, + + outputs: { + entries: { + type: 'array', + description: 'List of audit log entries', + items: { + type: 'object', + properties: AUDIT_LOG_ENTRY_OUTPUT_PROPERTIES, + }, + }, + totalCount: { type: 'number', description: 'Total number of audit log entries' }, + }, +} diff --git a/apps/sim/tools/launchdarkly/get_flag.ts b/apps/sim/tools/launchdarkly/get_flag.ts new file mode 100644 index 00000000000..68c7f01584d --- /dev/null +++ b/apps/sim/tools/launchdarkly/get_flag.ts @@ -0,0 +1,119 @@ +import type { + LaunchDarklyGetFlagParams, + LaunchDarklyGetFlagResponse, +} from '@/tools/launchdarkly/types' +import { FLAG_OUTPUT_PROPERTIES } from '@/tools/launchdarkly/types' +import type { ToolConfig } from '@/tools/types' + +export const launchDarklyGetFlagTool: ToolConfig< + LaunchDarklyGetFlagParams, + LaunchDarklyGetFlagResponse +> = { + id: 'launchdarkly_get_flag', + name: 'LaunchDarkly Get Flag', + description: 'Get a single feature flag by key from a LaunchDarkly project.', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'LaunchDarkly API key', + }, + projectKey: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'The project key', + }, + flagKey: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'The feature flag key', + }, + environmentKey: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Filter flag configuration to a specific environment', 
+ }, + }, + + request: { + url: (params) => { + const queryParams = new URLSearchParams() + if (params.environmentKey) queryParams.set('env', params.environmentKey) + const qs = queryParams.toString() + return `https://app.launchdarkly.com/api/v2/flags/${encodeURIComponent(params.projectKey.trim())}/${encodeURIComponent(params.flagKey.trim())}${qs ? `?${qs}` : ''}` + }, + method: 'GET', + headers: (params) => ({ + Authorization: params.apiKey.trim(), + }), + }, + + transformResponse: async (response: Response) => { + if (!response.ok) { + const error = await response.json().catch(() => ({ message: response.statusText })) + return { + success: false, + output: { + key: '', + name: '', + kind: '', + description: null, + temporary: false, + archived: false, + deprecated: false, + creationDate: 0, + tags: [], + variations: [], + maintainerId: null, + on: null, + }, + error: error.message, + } + } + + const data = await response.json() + + const environments = data.environments as Record> | undefined + let on: boolean | null = null + if (environments) { + const envKeys = Object.keys(environments) + if (envKeys.length === 1) { + on = (environments[envKeys[0]].on as boolean) ?? null + } + } + + return { + success: true, + output: { + key: data.key ?? null, + name: data.name ?? null, + kind: data.kind ?? null, + description: data.description ?? null, + temporary: data.temporary ?? false, + archived: data.archived ?? false, + deprecated: data.deprecated ?? false, + creationDate: data.creationDate ?? null, + tags: data.tags ?? [], + variations: data.variations ?? [], + maintainerId: data.maintainerId ?? 
null, + on, + }, + } + }, + + outputs: { + ...FLAG_OUTPUT_PROPERTIES, + on: { + type: 'boolean', + description: + 'Whether the flag is on in the requested environment (null if no single environment was specified)', + optional: true, + }, + }, +} diff --git a/apps/sim/tools/launchdarkly/get_flag_status.ts b/apps/sim/tools/launchdarkly/get_flag_status.ts new file mode 100644 index 00000000000..584de06e0af --- /dev/null +++ b/apps/sim/tools/launchdarkly/get_flag_status.ts @@ -0,0 +1,81 @@ +import type { + LaunchDarklyGetFlagStatusParams, + LaunchDarklyGetFlagStatusResponse, +} from '@/tools/launchdarkly/types' +import { FLAG_STATUS_OUTPUT_PROPERTIES } from '@/tools/launchdarkly/types' +import type { ToolConfig } from '@/tools/types' + +export const launchDarklyGetFlagStatusTool: ToolConfig< + LaunchDarklyGetFlagStatusParams, + LaunchDarklyGetFlagStatusResponse +> = { + id: 'launchdarkly_get_flag_status', + name: 'LaunchDarkly Get Flag Status', + description: + 'Get the status of a feature flag across environments (active, inactive, launched, etc.).', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'LaunchDarkly API key', + }, + projectKey: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'The project key', + }, + flagKey: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'The feature flag key', + }, + environmentKey: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'The environment key', + }, + }, + + request: { + url: (params) => + `https://app.launchdarkly.com/api/v2/flag-statuses/${encodeURIComponent(params.projectKey.trim())}/${encodeURIComponent(params.environmentKey.trim())}/${encodeURIComponent(params.flagKey.trim())}`, + method: 'GET', + headers: (params) => ({ + Authorization: params.apiKey.trim(), + }), + }, + + transformResponse: async (response: Response) => { + if (!response.ok) { + 
const error = await response.json().catch(() => ({ message: response.statusText })) + return { + success: false, + output: { + name: '', + lastRequested: null, + defaultVal: null, + }, + error: error.message, + } + } + + const data = await response.json() + + return { + success: true, + output: { + name: data.name ?? null, + lastRequested: data.lastRequested ?? null, + defaultVal: data.default ?? null, + }, + } + }, + + outputs: FLAG_STATUS_OUTPUT_PROPERTIES, +} diff --git a/apps/sim/tools/launchdarkly/index.ts b/apps/sim/tools/launchdarkly/index.ts new file mode 100644 index 00000000000..b134cb4d067 --- /dev/null +++ b/apps/sim/tools/launchdarkly/index.ts @@ -0,0 +1,38 @@ +export { launchDarklyCreateFlagTool } from '@/tools/launchdarkly/create_flag' +export { launchDarklyDeleteFlagTool } from '@/tools/launchdarkly/delete_flag' +export { launchDarklyGetAuditLogTool } from '@/tools/launchdarkly/get_audit_log' +export { launchDarklyGetFlagTool } from '@/tools/launchdarkly/get_flag' +export { launchDarklyGetFlagStatusTool } from '@/tools/launchdarkly/get_flag_status' +export { launchDarklyListEnvironmentsTool } from '@/tools/launchdarkly/list_environments' +export { launchDarklyListFlagsTool } from '@/tools/launchdarkly/list_flags' +export { launchDarklyListMembersTool } from '@/tools/launchdarkly/list_members' +export { launchDarklyListProjectsTool } from '@/tools/launchdarkly/list_projects' +export { launchDarklyListSegmentsTool } from '@/tools/launchdarkly/list_segments' +export { launchDarklyToggleFlagTool } from '@/tools/launchdarkly/toggle_flag' +export type { + LaunchDarklyCreateFlagParams, + LaunchDarklyCreateFlagResponse, + LaunchDarklyDeleteFlagParams, + LaunchDarklyDeleteFlagResponse, + LaunchDarklyGetAuditLogParams, + LaunchDarklyGetAuditLogResponse, + LaunchDarklyGetFlagParams, + LaunchDarklyGetFlagResponse, + LaunchDarklyGetFlagStatusParams, + LaunchDarklyGetFlagStatusResponse, + LaunchDarklyListEnvironmentsParams, + LaunchDarklyListEnvironmentsResponse, 
+ LaunchDarklyListFlagsParams, + LaunchDarklyListFlagsResponse, + LaunchDarklyListMembersParams, + LaunchDarklyListMembersResponse, + LaunchDarklyListProjectsParams, + LaunchDarklyListProjectsResponse, + LaunchDarklyListSegmentsParams, + LaunchDarklyListSegmentsResponse, + LaunchDarklyToggleFlagParams, + LaunchDarklyToggleFlagResponse, + LaunchDarklyUpdateFlagParams, + LaunchDarklyUpdateFlagResponse, +} from '@/tools/launchdarkly/types' +export { launchDarklyUpdateFlagTool } from '@/tools/launchdarkly/update_flag' diff --git a/apps/sim/tools/launchdarkly/list_environments.ts b/apps/sim/tools/launchdarkly/list_environments.ts new file mode 100644 index 00000000000..aa1647553a2 --- /dev/null +++ b/apps/sim/tools/launchdarkly/list_environments.ts @@ -0,0 +1,92 @@ +import type { + LaunchDarklyListEnvironmentsParams, + LaunchDarklyListEnvironmentsResponse, +} from '@/tools/launchdarkly/types' +import { ENVIRONMENT_OUTPUT_PROPERTIES } from '@/tools/launchdarkly/types' +import type { ToolConfig } from '@/tools/types' + +export const launchDarklyListEnvironmentsTool: ToolConfig< + LaunchDarklyListEnvironmentsParams, + LaunchDarklyListEnvironmentsResponse +> = { + id: 'launchdarkly_list_environments', + name: 'LaunchDarkly List Environments', + description: 'List environments in a LaunchDarkly project.', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'LaunchDarkly API key', + }, + projectKey: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'The project key to list environments for', + }, + limit: { + type: 'number', + required: false, + visibility: 'user-or-llm', + description: 'Maximum number of environments to return (default 20)', + }, + }, + + request: { + url: (params) => { + const queryParams = new URLSearchParams() + if (params.limit) queryParams.set('limit', String(params.limit)) + const qs = queryParams.toString() + return 
`https://app.launchdarkly.com/api/v2/projects/${encodeURIComponent(params.projectKey.trim())}/environments${qs ? `?${qs}` : ''}` + }, + method: 'GET', + headers: (params) => ({ + Authorization: params.apiKey.trim(), + }), + }, + + transformResponse: async (response: Response) => { + if (!response.ok) { + const error = await response.json().catch(() => ({ message: response.statusText })) + return { + success: false, + output: { environments: [], totalCount: 0 }, + error: error.message, + } + } + + const data = await response.json() + const environments = (data.items ?? []).map((item: Record) => ({ + id: (item._id as string) ?? null, + key: item.key ?? null, + name: item.name ?? null, + color: item.color ?? null, + apiKey: item.apiKey ?? null, + mobileKey: item.mobileKey ?? null, + tags: (item.tags as string[]) ?? [], + })) + + return { + success: true, + output: { + environments, + totalCount: (data.totalCount as number) ?? environments.length, + }, + } + }, + + outputs: { + environments: { + type: 'array', + description: 'List of environments', + items: { + type: 'object', + properties: ENVIRONMENT_OUTPUT_PROPERTIES, + }, + }, + totalCount: { type: 'number', description: 'Total number of environments' }, + }, +} diff --git a/apps/sim/tools/launchdarkly/list_flags.ts b/apps/sim/tools/launchdarkly/list_flags.ts new file mode 100644 index 00000000000..bd4b208cd4b --- /dev/null +++ b/apps/sim/tools/launchdarkly/list_flags.ts @@ -0,0 +1,106 @@ +import type { + LaunchDarklyListFlagsParams, + LaunchDarklyListFlagsResponse, +} from '@/tools/launchdarkly/types' +import { FLAG_OUTPUT_PROPERTIES } from '@/tools/launchdarkly/types' +import type { ToolConfig } from '@/tools/types' + +export const launchDarklyListFlagsTool: ToolConfig< + LaunchDarklyListFlagsParams, + LaunchDarklyListFlagsResponse +> = { + id: 'launchdarkly_list_flags', + name: 'LaunchDarkly List Flags', + description: 'List feature flags in a LaunchDarkly project.', + version: '1.0.0', + + params: { + apiKey: { 
+ type: 'string', + required: true, + visibility: 'user-only', + description: 'LaunchDarkly API key', + }, + projectKey: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'The project key to list flags for', + }, + environmentKey: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Filter flag configurations to a specific environment', + }, + tag: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Filter flags by tag name', + }, + limit: { + type: 'number', + required: false, + visibility: 'user-or-llm', + description: 'Maximum number of flags to return (default 20)', + }, + }, + + request: { + url: (params) => { + const queryParams = new URLSearchParams() + if (params.environmentKey) queryParams.set('env', params.environmentKey) + if (params.tag) queryParams.set('tag', params.tag) + if (params.limit) queryParams.set('limit', String(params.limit)) + const qs = queryParams.toString() + return `https://app.launchdarkly.com/api/v2/flags/${encodeURIComponent(params.projectKey.trim())}${qs ? `?${qs}` : ''}` + }, + method: 'GET', + headers: (params) => ({ + Authorization: params.apiKey.trim(), + }), + }, + + transformResponse: async (response: Response) => { + if (!response.ok) { + const error = await response.json().catch(() => ({ message: response.statusText })) + return { success: false, output: { flags: [], totalCount: 0 }, error: error.message } + } + + const data = await response.json() + const flags = (data.items ?? []).map((item: Record) => ({ + key: item.key ?? null, + name: item.name ?? null, + kind: item.kind ?? null, + description: item.description ?? null, + temporary: item.temporary ?? false, + archived: item.archived ?? false, + deprecated: item.deprecated ?? false, + creationDate: item.creationDate ?? null, + tags: (item.tags as string[]) ?? [], + variations: (item.variations as Array>) ?? [], + maintainerId: item.maintainerId ?? 
null, + })) + + return { + success: true, + output: { + flags, + totalCount: (data.totalCount as number) ?? flags.length, + }, + } + }, + + outputs: { + flags: { + type: 'array', + description: 'List of feature flags', + items: { + type: 'object', + properties: FLAG_OUTPUT_PROPERTIES, + }, + }, + totalCount: { type: 'number', description: 'Total number of flags' }, + }, +} diff --git a/apps/sim/tools/launchdarkly/list_members.ts b/apps/sim/tools/launchdarkly/list_members.ts new file mode 100644 index 00000000000..969098b6fe5 --- /dev/null +++ b/apps/sim/tools/launchdarkly/list_members.ts @@ -0,0 +1,83 @@ +import type { + LaunchDarklyListMembersParams, + LaunchDarklyListMembersResponse, +} from '@/tools/launchdarkly/types' +import { MEMBER_OUTPUT_PROPERTIES } from '@/tools/launchdarkly/types' +import type { ToolConfig } from '@/tools/types' + +export const launchDarklyListMembersTool: ToolConfig< + LaunchDarklyListMembersParams, + LaunchDarklyListMembersResponse +> = { + id: 'launchdarkly_list_members', + name: 'LaunchDarkly List Members', + description: 'List account members in your LaunchDarkly organization.', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'LaunchDarkly API key', + }, + limit: { + type: 'number', + required: false, + visibility: 'user-or-llm', + description: 'Maximum number of members to return (default 20)', + }, + }, + + request: { + url: (params) => { + const queryParams = new URLSearchParams() + if (params.limit) queryParams.set('limit', String(params.limit)) + const qs = queryParams.toString() + return `https://app.launchdarkly.com/api/v2/members${qs ? 
`?${qs}` : ''}` + }, + method: 'GET', + headers: (params) => ({ + Authorization: params.apiKey.trim(), + }), + }, + + transformResponse: async (response: Response) => { + if (!response.ok) { + const error = await response.json().catch(() => ({ message: response.statusText })) + return { success: false, output: { members: [], totalCount: 0 }, error: error.message } + } + + const data = await response.json() + const members = (data.items ?? []).map((item: Record) => ({ + id: (item._id as string) ?? null, + email: item.email ?? null, + firstName: item.firstName ?? null, + lastName: item.lastName ?? null, + role: item.role ?? null, + lastSeen: item._lastSeen ?? null, + creationDate: item.creationDate ?? null, + verified: item._verified ?? false, + })) + + return { + success: true, + output: { + members, + totalCount: (data.totalCount as number) ?? members.length, + }, + } + }, + + outputs: { + members: { + type: 'array', + description: 'List of account members', + items: { + type: 'object', + properties: MEMBER_OUTPUT_PROPERTIES, + }, + }, + totalCount: { type: 'number', description: 'Total number of members' }, + }, +} diff --git a/apps/sim/tools/launchdarkly/list_projects.ts b/apps/sim/tools/launchdarkly/list_projects.ts new file mode 100644 index 00000000000..e06a63a27bb --- /dev/null +++ b/apps/sim/tools/launchdarkly/list_projects.ts @@ -0,0 +1,79 @@ +import type { + LaunchDarklyListProjectsParams, + LaunchDarklyListProjectsResponse, +} from '@/tools/launchdarkly/types' +import { PROJECT_OUTPUT_PROPERTIES } from '@/tools/launchdarkly/types' +import type { ToolConfig } from '@/tools/types' + +export const launchDarklyListProjectsTool: ToolConfig< + LaunchDarklyListProjectsParams, + LaunchDarklyListProjectsResponse +> = { + id: 'launchdarkly_list_projects', + name: 'LaunchDarkly List Projects', + description: 'List all projects in your LaunchDarkly account.', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', 
+ description: 'LaunchDarkly API key', + }, + limit: { + type: 'number', + required: false, + visibility: 'user-or-llm', + description: 'Maximum number of projects to return (default 20)', + }, + }, + + request: { + url: (params) => { + const queryParams = new URLSearchParams() + if (params.limit) queryParams.set('limit', String(params.limit)) + const qs = queryParams.toString() + return `https://app.launchdarkly.com/api/v2/projects${qs ? `?${qs}` : ''}` + }, + method: 'GET', + headers: (params) => ({ + Authorization: params.apiKey.trim(), + }), + }, + + transformResponse: async (response: Response) => { + if (!response.ok) { + const error = await response.json().catch(() => ({ message: response.statusText })) + return { success: false, output: { projects: [], totalCount: 0 }, error: error.message } + } + + const data = await response.json() + const projects = (data.items ?? []).map((item: Record) => ({ + id: (item._id as string) ?? null, + key: item.key ?? null, + name: item.name ?? null, + tags: (item.tags as string[]) ?? [], + })) + + return { + success: true, + output: { + projects, + totalCount: (data.totalCount as number) ?? 
projects.length, + }, + } + }, + + outputs: { + projects: { + type: 'array', + description: 'List of projects', + items: { + type: 'object', + properties: PROJECT_OUTPUT_PROPERTIES, + }, + }, + totalCount: { type: 'number', description: 'Total number of projects' }, + }, +} diff --git a/apps/sim/tools/launchdarkly/list_segments.ts b/apps/sim/tools/launchdarkly/list_segments.ts new file mode 100644 index 00000000000..c86e45082e3 --- /dev/null +++ b/apps/sim/tools/launchdarkly/list_segments.ts @@ -0,0 +1,95 @@ +import type { + LaunchDarklyListSegmentsParams, + LaunchDarklyListSegmentsResponse, +} from '@/tools/launchdarkly/types' +import { SEGMENT_OUTPUT_PROPERTIES } from '@/tools/launchdarkly/types' +import type { ToolConfig } from '@/tools/types' + +export const launchDarklyListSegmentsTool: ToolConfig< + LaunchDarklyListSegmentsParams, + LaunchDarklyListSegmentsResponse +> = { + id: 'launchdarkly_list_segments', + name: 'LaunchDarkly List Segments', + description: 'List user segments in a LaunchDarkly project and environment.', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'LaunchDarkly API key', + }, + projectKey: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'The project key', + }, + environmentKey: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'The environment key', + }, + limit: { + type: 'number', + required: false, + visibility: 'user-or-llm', + description: 'Maximum number of segments to return (default 20)', + }, + }, + + request: { + url: (params) => { + const queryParams = new URLSearchParams() + if (params.limit) queryParams.set('limit', String(params.limit)) + const qs = queryParams.toString() + return `https://app.launchdarkly.com/api/v2/segments/${encodeURIComponent(params.projectKey.trim())}/${encodeURIComponent(params.environmentKey.trim())}${qs ? 
`?${qs}` : ''}` + }, + method: 'GET', + headers: (params) => ({ + Authorization: params.apiKey.trim(), + }), + }, + + transformResponse: async (response: Response) => { + if (!response.ok) { + const error = await response.json().catch(() => ({ message: response.statusText })) + return { success: false, output: { segments: [], totalCount: 0 }, error: error.message } + } + + const data = await response.json() + const segments = (data.items ?? []).map((item: Record) => ({ + key: item.key ?? null, + name: item.name ?? null, + description: item.description ?? null, + tags: (item.tags as string[]) ?? [], + creationDate: item.creationDate ?? null, + unbounded: item.unbounded ?? false, + included: (item.included as string[]) ?? [], + excluded: (item.excluded as string[]) ?? [], + })) + + return { + success: true, + output: { + segments, + totalCount: (data.totalCount as number) ?? segments.length, + }, + } + }, + + outputs: { + segments: { + type: 'array', + description: 'List of user segments', + items: { + type: 'object', + properties: SEGMENT_OUTPUT_PROPERTIES, + }, + }, + totalCount: { type: 'number', description: 'Total number of segments' }, + }, +} diff --git a/apps/sim/tools/launchdarkly/toggle_flag.ts b/apps/sim/tools/launchdarkly/toggle_flag.ts new file mode 100644 index 00000000000..fc4bdc429a1 --- /dev/null +++ b/apps/sim/tools/launchdarkly/toggle_flag.ts @@ -0,0 +1,125 @@ +import type { + LaunchDarklyToggleFlagParams, + LaunchDarklyToggleFlagResponse, +} from '@/tools/launchdarkly/types' +import { FLAG_OUTPUT_PROPERTIES } from '@/tools/launchdarkly/types' +import type { ToolConfig } from '@/tools/types' + +export const launchDarklyToggleFlagTool: ToolConfig< + LaunchDarklyToggleFlagParams, + LaunchDarklyToggleFlagResponse +> = { + id: 'launchdarkly_toggle_flag', + name: 'LaunchDarkly Toggle Flag', + description: 'Toggle a feature flag on or off in a specific LaunchDarkly environment.', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: 
true, + visibility: 'user-only', + description: 'LaunchDarkly API key', + }, + projectKey: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'The project key', + }, + flagKey: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'The feature flag key to toggle', + }, + environmentKey: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'The environment key to toggle the flag in', + }, + enabled: { + type: 'boolean', + required: true, + visibility: 'user-or-llm', + description: 'Whether to turn the flag on (true) or off (false)', + }, + }, + + request: { + url: (params) => + `https://app.launchdarkly.com/api/v2/flags/${encodeURIComponent(params.projectKey.trim())}/${encodeURIComponent(params.flagKey.trim())}`, + method: 'PATCH', + headers: (params) => ({ + Authorization: params.apiKey.trim(), + 'Content-Type': 'application/json; domain-model=launchdarkly.semanticpatch', + }), + body: (params) => ({ + environmentKey: params.environmentKey, + instructions: [{ kind: params.enabled ? 'turnFlagOn' : 'turnFlagOff' }], + }), + }, + + transformResponse: async (response: Response, params?: LaunchDarklyToggleFlagParams) => { + if (!response.ok) { + const error = await response.json().catch(() => ({ message: response.statusText })) + return { + success: false, + output: { + key: '', + name: '', + kind: '', + description: null, + temporary: false, + archived: false, + deprecated: false, + creationDate: 0, + tags: [], + variations: [], + maintainerId: null, + on: null, + }, + error: error.message, + } + } + + const data = await response.json() + + const environments = data.environments as Record> | undefined + let on: boolean | null = null + if (environments) { + const envKey = params?.environmentKey?.trim() + if (envKey && environments[envKey]) { + on = (environments[envKey].on as boolean) ?? null + } + } + + return { + success: true, + output: { + key: data.key ?? 
import type { OutputProperty, ToolResponse } from '@/tools/types'

/**
 * Shared output property definitions for LaunchDarkly API responses.
 * Based on LaunchDarkly REST API v2: https://apidocs.launchdarkly.com/
 */

// Output schema for a feature flag as returned by the flags endpoints.
export const FLAG_OUTPUT_PROPERTIES = {
  key: { type: 'string', description: 'The unique key of the feature flag' },
  name: { type: 'string', description: 'The human-readable name of the feature flag' },
  kind: { type: 'string', description: 'The type of flag (boolean or multivariate)' },
  description: { type: 'string', description: 'Description of the feature flag', optional: true },
  temporary: { type: 'boolean', description: 'Whether the flag is temporary' },
  archived: { type: 'boolean', description: 'Whether the flag is archived' },
  deprecated: { type: 'boolean', description: 'Whether the flag is deprecated' },
  creationDate: {
    type: 'number',
    description: 'Unix timestamp in milliseconds when the flag was created',
  },
  tags: {
    type: 'array',
    description: 'Tags applied to the flag',
    items: { type: 'string', description: 'Tag name' },
  },
  variations: {
    type: 'array',
    description: 'The variations for this feature flag',
    items: {
      type: 'object',
      properties: {
        value: { type: 'string', description: 'The variation value' },
        name: { type: 'string', description: 'The variation name', optional: true },
        description: { type: 'string', description: 'The variation description', optional: true },
      },
    },
  },
  maintainerId: {
    type: 'string',
    description: 'The ID of the member who maintains this flag',
    optional: true,
  },
  // `satisfies` validates the shape without widening, so keys stay literal.
} as const satisfies Record<string, OutputProperty>

// Output schema for a project as returned by the projects endpoint.
export const PROJECT_OUTPUT_PROPERTIES = {
  id: { type: 'string', description: 'The project ID' },
  key: { type: 'string', description: 'The unique project key' },
  name: { type: 'string', description: 'The project name' },
  tags: {
    type: 'array',
    description: 'Tags applied to the project',
    items: { type: 'string', description: 'Tag name' },
  },
} as const satisfies Record<string, OutputProperty>

// Output schema for an environment as returned by the environments endpoint.
export const ENVIRONMENT_OUTPUT_PROPERTIES = {
  id: { type: 'string', description: 'The environment ID' },
  key: { type: 'string', description: 'The unique environment key' },
  name: { type: 'string', description: 'The environment name' },
  color: { type: 'string', description: 'The color assigned to this environment' },
  apiKey: { type: 'string', description: 'The server-side SDK key for this environment' },
  mobileKey: { type: 'string', description: 'The mobile SDK key for this environment' },
  tags: {
    type: 'array',
    description: 'Tags applied to the environment',
    items: { type: 'string', description: 'Tag name' },
  },
} as const satisfies Record<string, OutputProperty>

// Output schema for one audit log entry.
export const AUDIT_LOG_ENTRY_OUTPUT_PROPERTIES = {
  id: { type: 'string', description: 'The audit log entry ID' },
  date: { type: 'number', description: 'Unix timestamp in milliseconds' },
  kind: { type: 'string', description: 'The type of action performed' },
  name: { type: 'string', description: 'The name of the resource acted on' },
  description: { type: 'string', description: 'Full description of the action', optional: true },
  shortDescription: {
    type: 'string',
    description: 'Short description of the action',
    optional: true,
  },
  memberEmail: {
    type: 'string',
    description: 'Email of the member who performed the action',
    optional: true,
  },
  targetName: { type: 'string', description: 'Name of the target resource', optional: true },
  targetKind: { type: 'string', description: 'Kind of the target resource', optional: true },
} as const satisfies Record<string, OutputProperty>

// Output schema for a user segment.
export const SEGMENT_OUTPUT_PROPERTIES = {
  key: { type: 'string', description: 'The unique segment key' },
  name: { type: 'string', description: 'The segment name' },
  description: { type: 'string', description: 'The segment description', optional: true },
  tags: {
    type: 'array',
    description: 'Tags applied to the segment',
    items: { type: 'string', description: 'Tag name' },
  },
  creationDate: {
    type: 'number',
    description: 'Unix timestamp in milliseconds when the segment was created',
  },
  unbounded: { type: 'boolean', description: 'Whether this is an unbounded (big) segment' },
  included: {
    type: 'array',
    description: 'User keys explicitly included in the segment',
    items: { type: 'string', description: 'User key' },
  },
  excluded: {
    type: 'array',
    description: 'User keys explicitly excluded from the segment',
    items: { type: 'string', description: 'User key' },
  },
} as const satisfies Record<string, OutputProperty>

// Output schema for a flag's evaluation status.
export const FLAG_STATUS_OUTPUT_PROPERTIES = {
  name: { type: 'string', description: 'The flag status (new, active, inactive, launched)' },
  lastRequested: {
    type: 'string',
    description: 'Timestamp of the last evaluation',
    optional: true,
  },
  defaultVal: { type: 'string', description: 'The default variation value', optional: true },
} as const satisfies Record<string, OutputProperty>

// Output schema for an account member.
export const MEMBER_OUTPUT_PROPERTIES = {
  id: { type: 'string', description: 'The member ID' },
  email: { type: 'string', description: 'The member email address' },
  firstName: { type: 'string', description: 'The member first name', optional: true },
  lastName: { type: 'string', description: 'The member last name', optional: true },
  role: { type: 'string', description: 'The member role (reader, writer, admin, owner)' },
  lastSeen: { type: 'number', description: 'Unix timestamp of last activity', optional: true },
  creationDate: { type: 'number', description: 'Unix timestamp when the member was created' },
  verified: { type: 'boolean', description: 'Whether the member email is verified' },
} as const satisfies Record<string, OutputProperty>

/** Parameters for the list-flags tool. */
export interface LaunchDarklyListFlagsParams {
  apiKey: string
  projectKey: string
  environmentKey?: string
  tag?: string
  limit?: number
}

/** Parameters for the get-flag tool. */
export interface LaunchDarklyGetFlagParams {
  apiKey: string
  projectKey: string
  flagKey: string
  environmentKey?: string
}

/** Parameters for the create-flag tool. `tags` is a comma-separated string. */
export interface LaunchDarklyCreateFlagParams {
  apiKey: string
  projectKey: string
  name: string
  key: string
  description?: string
  tags?: string
  temporary?: boolean
}

/** Parameters for the toggle-flag tool. */
export interface LaunchDarklyToggleFlagParams {
  apiKey: string
  projectKey: string
  flagKey: string
  environmentKey: string
  enabled: boolean
}

/** Parameters for the delete-flag tool. */
export interface LaunchDarklyDeleteFlagParams {
  apiKey: string
  projectKey: string
  flagKey: string
}

/** Parameters for the list-projects tool. */
export interface LaunchDarklyListProjectsParams {
  apiKey: string
  limit?: number
}

/** Parameters for the list-environments tool. */
export interface LaunchDarklyListEnvironmentsParams {
  apiKey: string
  projectKey: string
  limit?: number
}

// Normalized flag shape shared by the flag tools' outputs.
interface FlagItem {
  key: string
  name: string
  kind: string
  description: string | null
  temporary: boolean
  archived: boolean
  deprecated: boolean
  creationDate: number
  tags: string[]
  variations: Array<{ value: unknown; name?: string; description?: string }>
  maintainerId: string | null
}

// Normalized project shape.
interface ProjectItem {
  id: string
  key: string
  name: string
  tags: string[]
}

// Normalized environment shape.
interface EnvironmentItem {
  id: string
  key: string
  name: string
  color: string
  apiKey: string
  mobileKey: string
  tags: string[]
}

export interface LaunchDarklyListFlagsResponse extends ToolResponse {
  output: {
    flags: FlagItem[]
    totalCount: number
  }
}

export interface LaunchDarklyGetFlagResponse extends ToolResponse {
  // `on` is null when no environment-specific config was available.
  output: FlagItem & {
    on: boolean | null
  }
}

export interface LaunchDarklyCreateFlagResponse extends ToolResponse {
  output: FlagItem
}

export interface LaunchDarklyToggleFlagResponse extends ToolResponse {
  // `on` is null when no environment-specific config was available.
  output: FlagItem & {
    on: boolean | null
  }
}

export interface LaunchDarklyDeleteFlagResponse extends ToolResponse {
  output: {
    deleted: boolean
  }
}

export interface LaunchDarklyListProjectsResponse extends ToolResponse {
  output: {
    projects: ProjectItem[]
    totalCount: number
  }
}

export interface LaunchDarklyListEnvironmentsResponse extends ToolResponse {
  output: {
    environments: EnvironmentItem[]
    totalCount: number
  }
}

/** Parameters for the update-flag tool. Tag lists are comma-separated strings. */
export interface LaunchDarklyUpdateFlagParams {
  apiKey: string
  projectKey: string
  flagKey: string
  updateName?: string
  updateDescription?: string
  addTags?: string
  removeTags?: string
  archive?: boolean
  comment?: string
}

export interface LaunchDarklyUpdateFlagResponse extends ToolResponse {
  output: FlagItem
}

/** Parameters for the get-audit-log tool. */
export interface LaunchDarklyGetAuditLogParams {
  apiKey: string
  limit?: number
  spec?: string
}

// Normalized audit log entry shape.
interface AuditLogEntry {
  id: string
  date: number | null
  kind: string | null
  name: string | null
  description: string | null
  shortDescription: string | null
  memberEmail: string | null
  targetName: string | null
  targetKind: string | null
}

export interface LaunchDarklyGetAuditLogResponse extends ToolResponse {
  output: {
    entries: AuditLogEntry[]
    totalCount: number
  }
}

/** Parameters for the list-segments tool. */
export interface LaunchDarklyListSegmentsParams {
  apiKey: string
  projectKey: string
  environmentKey: string
  limit?: number
}

// Normalized segment shape.
interface SegmentItem {
  key: string
  name: string
  description: string | null
  tags: string[]
  creationDate: number | null
  unbounded: boolean
  included: string[]
  excluded: string[]
}

export interface LaunchDarklyListSegmentsResponse extends ToolResponse {
  output: {
    segments: SegmentItem[]
    totalCount: number
  }
}

/** Parameters for the get-flag-status tool. */
export interface LaunchDarklyGetFlagStatusParams {
  apiKey: string
  projectKey: string
  flagKey: string
  environmentKey: string
}

export interface LaunchDarklyGetFlagStatusResponse extends ToolResponse {
  output: {
    name: string
    lastRequested: string | null
    defaultVal: string | null
  }
}

/** Parameters for the list-members tool. */
export interface LaunchDarklyListMembersParams {
  apiKey: string
  limit?: number
}

// Normalized account member shape.
interface MemberItem {
  id: string
  email: string | null
  firstName: string | null
  lastName: string | null
  role: string | null
  lastSeen: number | null
  creationDate: number | null
  verified: boolean
}

export interface LaunchDarklyListMembersResponse extends ToolResponse {
  output: {
    members: MemberItem[]
    totalCount: number
  }
}
+} + +export interface LaunchDarklyListSegmentsResponse extends ToolResponse { + output: { + segments: SegmentItem[] + totalCount: number + } +} + +export interface LaunchDarklyGetFlagStatusParams { + apiKey: string + projectKey: string + flagKey: string + environmentKey: string +} + +export interface LaunchDarklyGetFlagStatusResponse extends ToolResponse { + output: { + name: string + lastRequested: string | null + defaultVal: string | null + } +} + +export interface LaunchDarklyListMembersParams { + apiKey: string + limit?: number +} + +interface MemberItem { + id: string + email: string | null + firstName: string | null + lastName: string | null + role: string | null + lastSeen: number | null + creationDate: number | null + verified: boolean +} + +export interface LaunchDarklyListMembersResponse extends ToolResponse { + output: { + members: MemberItem[] + totalCount: number + } +} diff --git a/apps/sim/tools/launchdarkly/update_flag.ts b/apps/sim/tools/launchdarkly/update_flag.ts new file mode 100644 index 00000000000..fe750ac4e0b --- /dev/null +++ b/apps/sim/tools/launchdarkly/update_flag.ts @@ -0,0 +1,165 @@ +import type { + LaunchDarklyUpdateFlagParams, + LaunchDarklyUpdateFlagResponse, +} from '@/tools/launchdarkly/types' +import { FLAG_OUTPUT_PROPERTIES } from '@/tools/launchdarkly/types' +import type { ToolConfig } from '@/tools/types' + +export const launchDarklyUpdateFlagTool: ToolConfig< + LaunchDarklyUpdateFlagParams, + LaunchDarklyUpdateFlagResponse +> = { + id: 'launchdarkly_update_flag', + name: 'LaunchDarkly Update Flag', + description: + 'Update a feature flag metadata (name, description, tags, temporary, archived) using semantic patch.', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'LaunchDarkly API key', + }, + projectKey: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'The project key', + }, + flagKey: { + type: 'string', + 
required: true, + visibility: 'user-or-llm', + description: 'The feature flag key to update', + }, + updateName: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'New name for the flag', + }, + updateDescription: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'New description for the flag', + }, + addTags: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Comma-separated tags to add', + }, + removeTags: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Comma-separated tags to remove', + }, + archive: { + type: 'boolean', + required: false, + visibility: 'user-or-llm', + description: 'Set to true to archive, false to restore', + }, + comment: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Optional comment explaining the update', + }, + }, + + request: { + url: (params) => + `https://app.launchdarkly.com/api/v2/flags/${encodeURIComponent(params.projectKey.trim())}/${encodeURIComponent(params.flagKey.trim())}`, + method: 'PATCH', + headers: (params) => ({ + Authorization: params.apiKey.trim(), + 'Content-Type': 'application/json; domain-model=launchdarkly.semanticpatch', + }), + body: (params) => { + const instructions: Array> = [] + + if (params.updateName) { + instructions.push({ kind: 'updateName', value: params.updateName }) + } + if (params.updateDescription) { + instructions.push({ kind: 'updateDescription', value: params.updateDescription }) + } + if (params.addTags) { + instructions.push({ + kind: 'addTags', + values: params.addTags.split(',').map((t: string) => t.trim()), + }) + } + if (params.removeTags) { + instructions.push({ + kind: 'removeTags', + values: params.removeTags.split(',').map((t: string) => t.trim()), + }) + } + if (params.archive === true) { + instructions.push({ kind: 'archiveFlag' }) + } else if (params.archive === false) { + instructions.push({ kind: 'restoreFlag' }) 
+ } + + if (instructions.length === 0) { + throw new Error( + 'At least one update field must be provided (updateName, updateDescription, addTags, removeTags, or archive)' + ) + } + + const body: Record = { instructions } + if (params.comment) body.comment = params.comment + + return body + }, + }, + + transformResponse: async (response: Response) => { + if (!response.ok) { + const error = await response.json().catch(() => ({ message: response.statusText })) + return { + success: false, + output: { + key: '', + name: '', + kind: '', + description: null, + temporary: false, + archived: false, + deprecated: false, + creationDate: 0, + tags: [], + variations: [], + maintainerId: null, + }, + error: error.message, + } + } + + const data = await response.json() + return { + success: true, + output: { + key: data.key ?? null, + name: data.name ?? null, + kind: data.kind ?? null, + description: data.description ?? null, + temporary: data.temporary ?? false, + archived: data.archived ?? false, + deprecated: data.deprecated ?? false, + creationDate: data.creationDate ?? null, + tags: data.tags ?? [], + variations: data.variations ?? [], + maintainerId: data.maintainerId ?? 
null, + }, + } + }, + + outputs: FLAG_OUTPUT_PROPERTIES, +} diff --git a/apps/sim/tools/registry.ts b/apps/sim/tools/registry.ts index 5c269a7c168..a9618191982 100644 --- a/apps/sim/tools/registry.ts +++ b/apps/sim/tools/registry.ts @@ -149,6 +149,7 @@ import { attioGetNoteTool, attioGetObjectTool, attioGetRecordTool, + attioGetTaskTool, attioGetThreadTool, attioGetWebhookTool, attioListListsTool, @@ -491,6 +492,7 @@ import { exaResearchTool, exaSearchTool, } from '@/tools/exa' +import { extendParserTool, extendParserV2Tool } from '@/tools/extend' import { fathomGetSummaryTool, fathomGetTranscriptTool, @@ -1290,6 +1292,20 @@ import { knowledgeUpsertDocumentTool, } from '@/tools/knowledge' import { langsmithCreateRunsBatchTool, langsmithCreateRunTool } from '@/tools/langsmith' +import { + launchDarklyCreateFlagTool, + launchDarklyDeleteFlagTool, + launchDarklyGetAuditLogTool, + launchDarklyGetFlagStatusTool, + launchDarklyGetFlagTool, + launchDarklyListEnvironmentsTool, + launchDarklyListFlagsTool, + launchDarklyListMembersTool, + launchDarklyListProjectsTool, + launchDarklyListSegmentsTool, + launchDarklyToggleFlagTool, + launchDarklyUpdateFlagTool, +} from '@/tools/launchdarkly' import { lemlistGetActivitiesTool, lemlistGetLeadTool, lemlistSendEmailTool } from '@/tools/lemlist' import { linearAddLabelToIssueTool, @@ -1947,6 +1963,13 @@ import { salesforceUpdateTaskTool, } from '@/tools/salesforce' import { searchTool } from '@/tools/search' +import { + secretsManagerCreateSecretTool, + secretsManagerDeleteSecretTool, + secretsManagerGetSecretTool, + secretsManagerListSecretsTool, + secretsManagerUpdateSecretTool, +} from '@/tools/secrets_manager' import { sendGridAddContactsToListTool, sendGridAddContactTool, @@ -2261,6 +2284,28 @@ import { tableUpdateRowTool, tableUpsertRowTool, } from '@/tools/table' +import { + tailscaleAuthorizeDeviceTool, + tailscaleCreateAuthKeyTool, + tailscaleDeleteAuthKeyTool, + tailscaleDeleteDeviceTool, + tailscaleGetAclTool, + 
tailscaleGetAuthKeyTool, + tailscaleGetDeviceRoutesTool, + tailscaleGetDeviceTool, + tailscaleGetDnsPreferencesTool, + tailscaleGetDnsSearchpathsTool, + tailscaleListAuthKeysTool, + tailscaleListDevicesTool, + tailscaleListDnsNameserversTool, + tailscaleListUsersTool, + tailscaleSetDeviceRoutesTool, + tailscaleSetDeviceTagsTool, + tailscaleSetDnsNameserversTool, + tailscaleSetDnsPreferencesTool, + tailscaleSetDnsSearchpathsTool, + tailscaleUpdateDeviceKeyTool, +} from '@/tools/tailscale' import { tavilyCrawlTool, tavilyExtractTool, tavilyMapTool, tavilySearchTool } from '@/tools/tavily' import { telegramDeleteMessageTool, @@ -2964,6 +3009,26 @@ export const tools: Record = { supabase_storage_delete_bucket: supabaseStorageDeleteBucketTool, supabase_storage_get_public_url: supabaseStorageGetPublicUrlTool, supabase_storage_create_signed_url: supabaseStorageCreateSignedUrlTool, + tailscale_list_devices: tailscaleListDevicesTool, + tailscale_get_device: tailscaleGetDeviceTool, + tailscale_delete_device: tailscaleDeleteDeviceTool, + tailscale_authorize_device: tailscaleAuthorizeDeviceTool, + tailscale_set_device_tags: tailscaleSetDeviceTagsTool, + tailscale_get_device_routes: tailscaleGetDeviceRoutesTool, + tailscale_set_device_routes: tailscaleSetDeviceRoutesTool, + tailscale_update_device_key: tailscaleUpdateDeviceKeyTool, + tailscale_list_dns_nameservers: tailscaleListDnsNameserversTool, + tailscale_set_dns_nameservers: tailscaleSetDnsNameserversTool, + tailscale_get_dns_preferences: tailscaleGetDnsPreferencesTool, + tailscale_set_dns_preferences: tailscaleSetDnsPreferencesTool, + tailscale_get_dns_searchpaths: tailscaleGetDnsSearchpathsTool, + tailscale_set_dns_searchpaths: tailscaleSetDnsSearchpathsTool, + tailscale_list_users: tailscaleListUsersTool, + tailscale_create_auth_key: tailscaleCreateAuthKeyTool, + tailscale_list_auth_keys: tailscaleListAuthKeysTool, + tailscale_get_auth_key: tailscaleGetAuthKeyTool, + tailscale_delete_auth_key: 
tailscaleDeleteAuthKeyTool, + tailscale_get_acl: tailscaleGetAclTool, calendly_get_current_user: calendlyGetCurrentUserTool, calendly_list_event_types: calendlyListEventTypesTool, calendly_get_event_type: calendlyGetEventTypeTool, @@ -3484,6 +3549,8 @@ export const tools: Record = { enrich_search_posts: enrichSearchPostsTool, enrich_search_similar_companies: enrichSearchSimilarCompaniesTool, enrich_verify_email: enrichVerifyEmailTool, + extend_parser: extendParserTool, + extend_parser_v2: extendParserV2Tool, exa_search: exaSearchTool, exa_get_contents: exaGetContentsTool, exa_find_similar_links: exaFindSimilarLinksTool, @@ -3904,6 +3971,7 @@ export const tools: Record = { attio_get_note: attioGetNoteTool, attio_get_object: attioGetObjectTool, attio_get_record: attioGetRecordTool, + attio_get_task: attioGetTaskTool, attio_get_thread: attioGetThreadTool, attio_get_webhook: attioGetWebhookTool, attio_list_lists: attioListListsTool, @@ -4033,6 +4101,11 @@ export const tools: Record = { s3_list_objects: s3ListObjectsTool, s3_delete_object: s3DeleteObjectTool, s3_copy_object: s3CopyObjectTool, + secrets_manager_get_secret: secretsManagerGetSecretTool, + secrets_manager_list_secrets: secretsManagerListSecretsTool, + secrets_manager_create_secret: secretsManagerCreateSecretTool, + secrets_manager_update_secret: secretsManagerUpdateSecretTool, + secrets_manager_delete_secret: secretsManagerDeleteSecretTool, telegram_message: telegramMessageTool, telegram_delete_message: telegramDeleteMessageTool, telegram_send_audio: telegramSendAudioTool, @@ -4243,6 +4316,18 @@ export const tools: Record = { linear_list_project_statuses: linearListProjectStatusesTool, langsmith_create_run: langsmithCreateRunTool, langsmith_create_runs_batch: langsmithCreateRunsBatchTool, + launchdarkly_create_flag: launchDarklyCreateFlagTool, + launchdarkly_delete_flag: launchDarklyDeleteFlagTool, + launchdarkly_get_audit_log: launchDarklyGetAuditLogTool, + launchdarkly_get_flag: launchDarklyGetFlagTool, + 
launchdarkly_get_flag_status: launchDarklyGetFlagStatusTool, + launchdarkly_list_environments: launchDarklyListEnvironmentsTool, + launchdarkly_list_flags: launchDarklyListFlagsTool, + launchdarkly_list_members: launchDarklyListMembersTool, + launchdarkly_list_projects: launchDarklyListProjectsTool, + launchdarkly_list_segments: launchDarklyListSegmentsTool, + launchdarkly_toggle_flag: launchDarklyToggleFlagTool, + launchdarkly_update_flag: launchDarklyUpdateFlagTool, lemlist_get_activities: lemlistGetActivitiesTool, lemlist_get_lead: lemlistGetLeadTool, lemlist_send_email: lemlistSendEmailTool, diff --git a/apps/sim/tools/secrets_manager/create_secret.ts b/apps/sim/tools/secrets_manager/create_secret.ts new file mode 100644 index 00000000000..331ee728588 --- /dev/null +++ b/apps/sim/tools/secrets_manager/create_secret.ts @@ -0,0 +1,94 @@ +import type { + SecretsManagerCreateSecretParams, + SecretsManagerCreateSecretResponse, +} from '@/tools/secrets_manager/types' +import type { ToolConfig } from '@/tools/types' + +export const createSecretTool: ToolConfig< + SecretsManagerCreateSecretParams, + SecretsManagerCreateSecretResponse +> = { + id: 'secrets_manager_create_secret', + name: 'Secrets Manager Create Secret', + description: 'Create a new secret in AWS Secrets Manager', + version: '1.0', + + params: { + region: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'AWS region (e.g., us-east-1)', + }, + accessKeyId: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'AWS access key ID', + }, + secretAccessKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'AWS secret access key', + }, + name: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Name of the secret to create', + }, + secretValue: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'The secret value (plain text or JSON string)', + }, + description: { 
+ type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Description of the secret', + }, + }, + + request: { + url: '/api/tools/secrets_manager/create-secret', + method: 'POST', + headers: () => ({ 'Content-Type': 'application/json' }), + body: (params) => ({ + region: params.region, + accessKeyId: params.accessKeyId, + secretAccessKey: params.secretAccessKey, + name: params.name, + secretValue: params.secretValue, + description: params.description, + }), + }, + + transformResponse: async (response: Response) => { + const data = await response.json() + + if (!response.ok) { + throw new Error(data.error || 'Failed to create secret') + } + + return { + success: true, + output: { + message: data.message || 'Secret created successfully', + name: data.name ?? '', + arn: data.arn ?? '', + versionId: data.versionId ?? '', + }, + error: undefined, + } + }, + + outputs: { + message: { type: 'string', description: 'Operation status message' }, + name: { type: 'string', description: 'Name of the created secret' }, + arn: { type: 'string', description: 'ARN of the created secret' }, + versionId: { type: 'string', description: 'Version ID of the created secret' }, + }, +} diff --git a/apps/sim/tools/secrets_manager/delete_secret.ts b/apps/sim/tools/secrets_manager/delete_secret.ts new file mode 100644 index 00000000000..afea77a2080 --- /dev/null +++ b/apps/sim/tools/secrets_manager/delete_secret.ts @@ -0,0 +1,94 @@ +import type { + SecretsManagerDeleteSecretParams, + SecretsManagerDeleteSecretResponse, +} from '@/tools/secrets_manager/types' +import type { ToolConfig } from '@/tools/types' + +export const deleteSecretTool: ToolConfig< + SecretsManagerDeleteSecretParams, + SecretsManagerDeleteSecretResponse +> = { + id: 'secrets_manager_delete_secret', + name: 'Secrets Manager Delete Secret', + description: 'Delete a secret from AWS Secrets Manager', + version: '1.0', + + params: { + region: { + type: 'string', + required: true, + visibility: 
'user-only', + description: 'AWS region (e.g., us-east-1)', + }, + accessKeyId: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'AWS access key ID', + }, + secretAccessKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'AWS secret access key', + }, + secretId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'The name or ARN of the secret to delete', + }, + recoveryWindowInDays: { + type: 'number', + required: false, + visibility: 'user-or-llm', + description: 'Number of days before permanent deletion (7-30, default 30)', + }, + forceDelete: { + type: 'boolean', + required: false, + visibility: 'user-or-llm', + description: 'If true, immediately delete without recovery window', + }, + }, + + request: { + url: '/api/tools/secrets_manager/delete-secret', + method: 'POST', + headers: () => ({ 'Content-Type': 'application/json' }), + body: (params) => ({ + region: params.region, + accessKeyId: params.accessKeyId, + secretAccessKey: params.secretAccessKey, + secretId: params.secretId, + recoveryWindowInDays: params.recoveryWindowInDays, + forceDelete: params.forceDelete, + }), + }, + + transformResponse: async (response: Response) => { + const data = await response.json() + + if (!response.ok) { + throw new Error(data.error || 'Failed to delete secret') + } + + return { + success: true, + output: { + message: data.message || 'Secret scheduled for deletion', + name: data.name ?? '', + arn: data.arn ?? '', + deletionDate: data.deletionDate ?? 
null, + }, + error: undefined, + } + }, + + outputs: { + message: { type: 'string', description: 'Operation status message' }, + name: { type: 'string', description: 'Name of the deleted secret' }, + arn: { type: 'string', description: 'ARN of the deleted secret' }, + deletionDate: { type: 'string', description: 'Scheduled deletion date', optional: true }, + }, +} diff --git a/apps/sim/tools/secrets_manager/get_secret.ts b/apps/sim/tools/secrets_manager/get_secret.ts new file mode 100644 index 00000000000..10fb58c3c9d --- /dev/null +++ b/apps/sim/tools/secrets_manager/get_secret.ts @@ -0,0 +1,98 @@ +import type { + SecretsManagerGetSecretParams, + SecretsManagerGetSecretResponse, +} from '@/tools/secrets_manager/types' +import type { ToolConfig } from '@/tools/types' + +export const getSecretTool: ToolConfig< + SecretsManagerGetSecretParams, + SecretsManagerGetSecretResponse +> = { + id: 'secrets_manager_get_secret', + name: 'Secrets Manager Get Secret', + description: 'Retrieve a secret value from AWS Secrets Manager', + version: '1.0', + + params: { + region: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'AWS region (e.g., us-east-1)', + }, + accessKeyId: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'AWS access key ID', + }, + secretAccessKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'AWS secret access key', + }, + secretId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'The name or ARN of the secret to retrieve', + }, + versionId: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'The unique identifier of the version to retrieve', + }, + versionStage: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'The staging label of the version to retrieve (e.g., AWSCURRENT, AWSPREVIOUS)', + }, + }, + + request: { + url: '/api/tools/secrets_manager/get-secret', + 
method: 'POST', + headers: () => ({ 'Content-Type': 'application/json' }), + body: (params) => ({ + region: params.region, + accessKeyId: params.accessKeyId, + secretAccessKey: params.secretAccessKey, + secretId: params.secretId, + versionId: params.versionId, + versionStage: params.versionStage, + }), + }, + + transformResponse: async (response: Response) => { + const data = await response.json() + + if (!response.ok) { + throw new Error(data.error || 'Failed to retrieve secret') + } + + return { + success: true, + output: { + name: data.name ?? '', + secretValue: data.secretValue ?? '', + arn: data.arn ?? '', + versionId: data.versionId ?? '', + versionStages: data.versionStages ?? [], + createdDate: data.createdDate ?? null, + }, + error: undefined, + } + }, + + outputs: { + name: { type: 'string', description: 'Name of the secret' }, + secretValue: { type: 'string', description: 'The decrypted secret value' }, + arn: { type: 'string', description: 'ARN of the secret' }, + versionId: { type: 'string', description: 'Version ID of the secret' }, + versionStages: { type: 'array', description: 'Staging labels attached to this version' }, + createdDate: { type: 'string', description: 'Date the secret was created' }, + }, +} diff --git a/apps/sim/tools/secrets_manager/index.ts b/apps/sim/tools/secrets_manager/index.ts new file mode 100644 index 00000000000..7bc9beae221 --- /dev/null +++ b/apps/sim/tools/secrets_manager/index.ts @@ -0,0 +1,11 @@ +import { createSecretTool } from './create_secret' +import { deleteSecretTool } from './delete_secret' +import { getSecretTool } from './get_secret' +import { listSecretsTool } from './list_secrets' +import { updateSecretTool } from './update_secret' + +export const secretsManagerGetSecretTool = getSecretTool +export const secretsManagerListSecretsTool = listSecretsTool +export const secretsManagerCreateSecretTool = createSecretTool +export const secretsManagerUpdateSecretTool = updateSecretTool +export const 
secretsManagerDeleteSecretTool = deleteSecretTool diff --git a/apps/sim/tools/secrets_manager/list_secrets.ts b/apps/sim/tools/secrets_manager/list_secrets.ts new file mode 100644 index 00000000000..d5e41f229eb --- /dev/null +++ b/apps/sim/tools/secrets_manager/list_secrets.ts @@ -0,0 +1,92 @@ +import type { + SecretsManagerListSecretsParams, + SecretsManagerListSecretsResponse, +} from '@/tools/secrets_manager/types' +import type { ToolConfig } from '@/tools/types' + +export const listSecretsTool: ToolConfig< + SecretsManagerListSecretsParams, + SecretsManagerListSecretsResponse +> = { + id: 'secrets_manager_list_secrets', + name: 'Secrets Manager List Secrets', + description: 'List secrets stored in AWS Secrets Manager', + version: '1.0', + + params: { + region: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'AWS region (e.g., us-east-1)', + }, + accessKeyId: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'AWS access key ID', + }, + secretAccessKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'AWS secret access key', + }, + maxResults: { + type: 'number', + required: false, + visibility: 'user-or-llm', + description: 'Maximum number of secrets to return (1-100, default 100)', + }, + nextToken: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Pagination token from a previous request', + }, + }, + + request: { + url: '/api/tools/secrets_manager/list-secrets', + method: 'POST', + headers: () => ({ 'Content-Type': 'application/json' }), + body: (params) => ({ + region: params.region, + accessKeyId: params.accessKeyId, + secretAccessKey: params.secretAccessKey, + maxResults: params.maxResults, + nextToken: params.nextToken, + }), + }, + + transformResponse: async (response: Response) => { + const data = await response.json() + + if (!response.ok) { + throw new Error(data.error || 'Failed to list secrets') + } + + return { + success: 
true, + output: { + secrets: data.secrets ?? [], + nextToken: data.nextToken ?? null, + count: data.count ?? 0, + }, + error: undefined, + } + }, + + outputs: { + secrets: { + type: 'json', + description: 'List of secrets with name, ARN, description, and dates', + }, + nextToken: { + type: 'string', + description: 'Pagination token for the next page of results', + optional: true, + }, + count: { type: 'number', description: 'Number of secrets returned' }, + }, +} diff --git a/apps/sim/tools/secrets_manager/types.ts b/apps/sim/tools/secrets_manager/types.ts new file mode 100644 index 00000000000..48c40e6d0f2 --- /dev/null +++ b/apps/sim/tools/secrets_manager/types.ts @@ -0,0 +1,101 @@ +import type { ToolResponse } from '@/tools/types' + +export interface SecretsManagerConnectionConfig { + region: string + accessKeyId: string + secretAccessKey: string +} + +export interface SecretsManagerGetSecretParams extends SecretsManagerConnectionConfig { + secretId: string + versionId?: string | null + versionStage?: string | null +} + +export interface SecretsManagerListSecretsParams extends SecretsManagerConnectionConfig { + maxResults?: number | null + nextToken?: string | null +} + +export interface SecretsManagerCreateSecretParams extends SecretsManagerConnectionConfig { + name: string + secretValue: string + description?: string | null +} + +export interface SecretsManagerUpdateSecretParams extends SecretsManagerConnectionConfig { + secretId: string + secretValue: string + description?: string | null +} + +export interface SecretsManagerDeleteSecretParams extends SecretsManagerConnectionConfig { + secretId: string + recoveryWindowInDays?: number | null + forceDelete?: boolean | null +} + +export interface SecretsManagerBaseResponse extends ToolResponse { + output: { message: string } + error?: string +} + +export interface SecretsManagerGetSecretResponse extends ToolResponse { + output: { + name: string + secretValue: string + arn: string + versionId: string + 
versionStages: string[] + createdDate: string | null + } + error?: string +} + +export interface SecretsManagerListSecretsResponse extends ToolResponse { + output: { + secrets: Array<{ + name: string + arn: string + description: string | null + createdDate: string | null + lastChangedDate: string | null + lastAccessedDate: string | null + rotationEnabled: boolean + tags: Array<{ key: string; value: string }> + }> + nextToken: string | null + count: number + } + error?: string +} + +export interface SecretsManagerCreateSecretResponse extends ToolResponse { + output: { + message: string + name: string + arn: string + versionId: string + } + error?: string +} + +export interface SecretsManagerUpdateSecretResponse extends ToolResponse { + output: { + message: string + name: string + arn: string + versionId: string + } + error?: string +} + +export interface SecretsManagerDeleteSecretResponse extends ToolResponse { + output: { + message: string + name: string + arn: string + deletionDate: string | null + } + error?: string +} diff --git a/apps/sim/tools/secrets_manager/update_secret.ts b/apps/sim/tools/secrets_manager/update_secret.ts new file mode 100644 index 00000000000..a5a59dce681 --- /dev/null +++ b/apps/sim/tools/secrets_manager/update_secret.ts @@ -0,0 +1,94 @@ +import type { + SecretsManagerUpdateSecretParams, + SecretsManagerUpdateSecretResponse, +} from '@/tools/secrets_manager/types' +import type { ToolConfig } from '@/tools/types' + +export const updateSecretTool: ToolConfig< + SecretsManagerUpdateSecretParams, + SecretsManagerUpdateSecretResponse +> = { + id: 'secrets_manager_update_secret', + name: 'Secrets Manager Update Secret', + description: 'Update the value of an existing secret in AWS Secrets Manager', + version: '1.0', + + params: { + region: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'AWS region (e.g., us-east-1)', + }, + accessKeyId: { + type: 'string', + required: true, + visibility: 'user-only', + 
description: 'AWS access key ID', + }, + secretAccessKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'AWS secret access key', + }, + secretId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'The name or ARN of the secret to update', + }, + secretValue: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'The new secret value (plain text or JSON string)', + }, + description: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Updated description of the secret', + }, + }, + + request: { + url: '/api/tools/secrets_manager/update-secret', + method: 'POST', + headers: () => ({ 'Content-Type': 'application/json' }), + body: (params) => ({ + region: params.region, + accessKeyId: params.accessKeyId, + secretAccessKey: params.secretAccessKey, + secretId: params.secretId, + secretValue: params.secretValue, + description: params.description, + }), + }, + + transformResponse: async (response: Response) => { + const data = await response.json() + + if (!response.ok) { + throw new Error(data.error || 'Failed to update secret') + } + + return { + success: true, + output: { + message: data.message || 'Secret updated successfully', + name: data.name ?? '', + arn: data.arn ?? '', + versionId: data.versionId ?? 
'', + }, + error: undefined, + } + }, + + outputs: { + message: { type: 'string', description: 'Operation status message' }, + name: { type: 'string', description: 'Name of the updated secret' }, + arn: { type: 'string', description: 'ARN of the updated secret' }, + versionId: { type: 'string', description: 'Version ID of the updated secret' }, + }, +} diff --git a/apps/sim/tools/tailscale/authorize_device.ts b/apps/sim/tools/tailscale/authorize_device.ts new file mode 100644 index 00000000000..f809fb95eba --- /dev/null +++ b/apps/sim/tools/tailscale/authorize_device.ts @@ -0,0 +1,78 @@ +import type { ToolConfig } from '@/tools/types' +import type { TailscaleAuthorizeDeviceParams, TailscaleAuthorizeDeviceResponse } from './types' + +export const tailscaleAuthorizeDeviceTool: ToolConfig< + TailscaleAuthorizeDeviceParams, + TailscaleAuthorizeDeviceResponse +> = { + id: 'tailscale_authorize_device', + name: 'Tailscale Authorize Device', + description: 'Authorize or deauthorize a device on the tailnet', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Tailscale API key', + }, + tailnet: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Tailnet name (e.g., example.com) or "-" for default', + }, + deviceId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Device ID to authorize', + }, + authorized: { + type: 'boolean', + required: true, + visibility: 'user-or-llm', + description: 'Whether to authorize (true) or deauthorize (false) the device', + }, + }, + + request: { + url: (params) => + `https://api.tailscale.com/api/v2/device/${encodeURIComponent(params.deviceId.trim())}/authorized`, + method: 'POST', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey.trim()}`, + 'Content-Type': 'application/json', + }), + body: (params) => ({ + authorized: params.authorized, + }), + }, + + transformResponse: async (response: 
Response, params?: TailscaleAuthorizeDeviceParams) => { + if (!response.ok) { + const data = await response.json().catch(() => ({})) + return { + success: false, + output: { success: false, deviceId: '', authorized: false }, + error: (data as Record).message ?? 'Failed to authorize device', + } + } + + return { + success: true, + output: { + success: true, + deviceId: params?.deviceId ?? '', + authorized: params?.authorized ?? true, + }, + } + }, + + outputs: { + success: { type: 'boolean', description: 'Whether the operation succeeded' }, + deviceId: { type: 'string', description: 'Device ID' }, + authorized: { type: 'boolean', description: 'Authorization status after the operation' }, + }, +} diff --git a/apps/sim/tools/tailscale/create_auth_key.ts b/apps/sim/tools/tailscale/create_auth_key.ts new file mode 100644 index 00000000000..52ab5954408 --- /dev/null +++ b/apps/sim/tools/tailscale/create_auth_key.ts @@ -0,0 +1,172 @@ +import type { ToolConfig } from '@/tools/types' +import type { TailscaleCreateAuthKeyParams, TailscaleCreateAuthKeyResponse } from './types' + +export const tailscaleCreateAuthKeyTool: ToolConfig< + TailscaleCreateAuthKeyParams, + TailscaleCreateAuthKeyResponse +> = { + id: 'tailscale_create_auth_key', + name: 'Tailscale Create Auth Key', + description: 'Create a new auth key for the tailnet to pre-authorize devices', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Tailscale API key', + }, + tailnet: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Tailnet name (e.g., example.com) or "-" for default', + }, + reusable: { + type: 'boolean', + required: false, + visibility: 'user-or-llm', + description: 'Whether the key can be used more than once', + default: false, + }, + ephemeral: { + type: 'boolean', + required: false, + visibility: 'user-or-llm', + description: 'Whether devices authenticated with this key are ephemeral', + 
default: false, + }, + preauthorized: { + type: 'boolean', + required: false, + visibility: 'user-or-llm', + description: 'Whether devices are pre-authorized (skip manual approval)', + default: true, + }, + tags: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: + 'Comma-separated list of tags for devices using this key (e.g., "tag:server,tag:prod")', + }, + description: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Description for the auth key', + }, + expirySeconds: { + type: 'number', + required: false, + visibility: 'user-or-llm', + description: 'Key expiry time in seconds (default: 90 days)', + }, + }, + + request: { + url: (params) => + `https://api.tailscale.com/api/v2/tailnet/${encodeURIComponent(params.tailnet.trim())}/keys`, + method: 'POST', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey.trim()}`, + 'Content-Type': 'application/json', + }), + body: (params) => { + const tags = params.tags + ? params.tags + .split(',') + .map((t) => t.trim()) + .filter(Boolean) + : [] + + const createCaps: Record = { + reusable: params.reusable ?? false, + ephemeral: params.ephemeral ?? false, + preauthorized: params.preauthorized ?? true, + } + + if (tags.length > 0) { + createCaps.tags = tags + } + + const body: Record = { + capabilities: { + devices: { + create: createCaps, + }, + }, + } + + if (params.description) body.description = params.description + if (params.expirySeconds !== undefined && params.expirySeconds !== null) + body.expirySeconds = params.expirySeconds + + return body + }, + }, + + transformResponse: async (response) => { + if (!response.ok) { + const data = await response.json().catch(() => ({})) + return { + success: false, + output: { + id: '', + key: '', + description: '', + created: '', + expires: '', + revoked: '', + capabilities: { reusable: false, ephemeral: false, preauthorized: false, tags: [] }, + }, + error: (data as Record).message ?? 
'Failed to create auth key', + } + } + + const data = await response.json() + const deviceCaps = data.capabilities?.devices?.create ?? {} + + return { + success: true, + output: { + id: data.id ?? null, + key: data.key ?? null, + description: data.description ?? null, + created: data.created ?? null, + expires: data.expires ?? null, + revoked: data.revoked ?? null, + capabilities: { + reusable: deviceCaps.reusable ?? false, + ephemeral: deviceCaps.ephemeral ?? false, + preauthorized: deviceCaps.preauthorized ?? false, + tags: deviceCaps.tags ?? [], + }, + }, + } + }, + + outputs: { + id: { type: 'string', description: 'Auth key ID' }, + key: { type: 'string', description: 'The auth key value (only shown once at creation)' }, + description: { type: 'string', description: 'Key description', optional: true }, + created: { type: 'string', description: 'Creation timestamp' }, + expires: { type: 'string', description: 'Expiration timestamp' }, + revoked: { + type: 'string', + description: 'Revocation timestamp (empty if not revoked)', + optional: true, + }, + capabilities: { + type: 'object', + description: 'Key capabilities', + properties: { + reusable: { type: 'boolean', description: 'Whether the key is reusable' }, + ephemeral: { type: 'boolean', description: 'Whether devices are ephemeral' }, + preauthorized: { type: 'boolean', description: 'Whether devices are pre-authorized' }, + tags: { type: 'array', description: 'Tags applied to devices using this key' }, + }, + }, + }, +} diff --git a/apps/sim/tools/tailscale/delete_auth_key.ts b/apps/sim/tools/tailscale/delete_auth_key.ts new file mode 100644 index 00000000000..d4f00c75398 --- /dev/null +++ b/apps/sim/tools/tailscale/delete_auth_key.ts @@ -0,0 +1,78 @@ +import type { ToolConfig, ToolResponse } from '@/tools/types' + +interface TailscaleDeleteAuthKeyParams { + apiKey: string + tailnet: string + keyId: string +} + +interface TailscaleDeleteAuthKeyResponse extends ToolResponse { + output: { + success: boolean + 
keyId: string + } +} + +export const tailscaleDeleteAuthKeyTool: ToolConfig< + TailscaleDeleteAuthKeyParams, + TailscaleDeleteAuthKeyResponse +> = { + id: 'tailscale_delete_auth_key', + name: 'Tailscale Delete Auth Key', + description: 'Revoke and delete an auth key', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Tailscale API key', + }, + tailnet: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Tailnet name (e.g., example.com) or "-" for default', + }, + keyId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Auth key ID to delete', + }, + }, + + request: { + url: (params) => + `https://api.tailscale.com/api/v2/tailnet/${encodeURIComponent(params.tailnet.trim())}/keys/${encodeURIComponent(params.keyId.trim())}`, + method: 'DELETE', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey.trim()}`, + }), + }, + + transformResponse: async (response: Response, params?: TailscaleDeleteAuthKeyParams) => { + if (!response.ok) { + const data = await response.json().catch(() => ({})) + return { + success: false, + output: { success: false, keyId: '' }, + error: (data as Record).message ?? 'Failed to delete auth key', + } + } + + return { + success: true, + output: { + success: true, + keyId: params?.keyId ?? 
'', + }, + } + }, + + outputs: { + success: { type: 'boolean', description: 'Whether the auth key was successfully deleted' }, + keyId: { type: 'string', description: 'ID of the deleted auth key' }, + }, +} diff --git a/apps/sim/tools/tailscale/delete_device.ts b/apps/sim/tools/tailscale/delete_device.ts new file mode 100644 index 00000000000..16671acec1d --- /dev/null +++ b/apps/sim/tools/tailscale/delete_device.ts @@ -0,0 +1,66 @@ +import type { ToolConfig } from '@/tools/types' +import type { TailscaleDeleteDeviceResponse, TailscaleDeviceParams } from './types' + +export const tailscaleDeleteDeviceTool: ToolConfig< + TailscaleDeviceParams, + TailscaleDeleteDeviceResponse +> = { + id: 'tailscale_delete_device', + name: 'Tailscale Delete Device', + description: 'Remove a device from the tailnet', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Tailscale API key', + }, + tailnet: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Tailnet name (e.g., example.com) or "-" for default', + }, + deviceId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Device ID to delete', + }, + }, + + request: { + url: (params) => + `https://api.tailscale.com/api/v2/device/${encodeURIComponent(params.deviceId.trim())}`, + method: 'DELETE', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey.trim()}`, + }), + }, + + transformResponse: async (response: Response, params?: TailscaleDeviceParams) => { + if (!response.ok) { + const data = await response.json().catch(() => ({})) + return { + success: false, + output: { success: false, deviceId: '' }, + error: (data as Record).message ?? 'Failed to delete device', + } + } + + return { + success: true, + output: { + success: true, + deviceId: params?.deviceId ?? 
'', + }, + } + }, + + outputs: { + success: { type: 'boolean', description: 'Whether the device was successfully deleted' }, + deviceId: { type: 'string', description: 'ID of the deleted device' }, + }, +} diff --git a/apps/sim/tools/tailscale/get_acl.ts b/apps/sim/tools/tailscale/get_acl.ts new file mode 100644 index 00000000000..6d51ccbec6e --- /dev/null +++ b/apps/sim/tools/tailscale/get_acl.ts @@ -0,0 +1,72 @@ +import type { ToolConfig, ToolResponse } from '@/tools/types' +import type { TailscaleBaseParams } from './types' + +interface TailscaleGetAclResponse extends ToolResponse { + output: { + acl: string + etag: string + } +} + +export const tailscaleGetAclTool: ToolConfig = { + id: 'tailscale_get_acl', + name: 'Tailscale Get ACL', + description: 'Get the current ACL policy for the tailnet', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Tailscale API key', + }, + tailnet: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Tailnet name (e.g., example.com) or "-" for default', + }, + }, + + request: { + url: (params) => + `https://api.tailscale.com/api/v2/tailnet/${encodeURIComponent(params.tailnet.trim())}/acl`, + method: 'GET', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey.trim()}`, + Accept: 'application/json', + }), + }, + + transformResponse: async (response) => { + if (!response.ok) { + const data = await response.json().catch(() => ({})) + return { + success: false, + output: { acl: '', etag: '' }, + error: (data as Record).message ?? 'Failed to get ACL', + } + } + + const etag = response.headers.get('ETag') ?? 
'' + const data = await response.json() + + return { + success: true, + output: { + acl: JSON.stringify(data, null, 2), + etag, + }, + } + }, + + outputs: { + acl: { type: 'string', description: 'ACL policy as JSON string' }, + etag: { + type: 'string', + description: 'ETag for the current ACL version (use with If-Match header for updates)', + optional: true, + }, + }, +} diff --git a/apps/sim/tools/tailscale/get_auth_key.ts b/apps/sim/tools/tailscale/get_auth_key.ts new file mode 100644 index 00000000000..5b4d200c03e --- /dev/null +++ b/apps/sim/tools/tailscale/get_auth_key.ts @@ -0,0 +1,119 @@ +import type { ToolConfig, ToolResponse } from '@/tools/types' + +interface TailscaleGetAuthKeyParams { + apiKey: string + tailnet: string + keyId: string +} + +interface TailscaleGetAuthKeyResponse extends ToolResponse { + output: { + id: string + description: string + created: string + expires: string + revoked: string + capabilities: { + reusable: boolean + ephemeral: boolean + preauthorized: boolean + tags: string[] + } + } +} + +export const tailscaleGetAuthKeyTool: ToolConfig< + TailscaleGetAuthKeyParams, + TailscaleGetAuthKeyResponse +> = { + id: 'tailscale_get_auth_key', + name: 'Tailscale Get Auth Key', + description: 'Get details of a specific auth key', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Tailscale API key', + }, + tailnet: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Tailnet name (e.g., example.com) or "-" for default', + }, + keyId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Auth key ID', + }, + }, + + request: { + url: (params) => + `https://api.tailscale.com/api/v2/tailnet/${encodeURIComponent(params.tailnet.trim())}/keys/${encodeURIComponent(params.keyId.trim())}`, + method: 'GET', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey.trim()}`, + }), + }, + + transformResponse: 
async (response) => { + if (!response.ok) { + const data = await response.json().catch(() => ({})) + return { + success: false, + output: { + id: '', + description: '', + created: '', + expires: '', + revoked: '', + capabilities: { reusable: false, ephemeral: false, preauthorized: false, tags: [] }, + }, + error: (data as Record).message ?? 'Failed to get auth key', + } + } + + const data = await response.json() + const deviceCaps = data.capabilities?.devices?.create ?? {} + + return { + success: true, + output: { + id: data.id ?? null, + description: data.description ?? null, + created: data.created ?? null, + expires: data.expires ?? null, + revoked: data.revoked ?? null, + capabilities: { + reusable: deviceCaps.reusable ?? false, + ephemeral: deviceCaps.ephemeral ?? false, + preauthorized: deviceCaps.preauthorized ?? false, + tags: deviceCaps.tags ?? [], + }, + }, + } + }, + + outputs: { + id: { type: 'string', description: 'Auth key ID' }, + description: { type: 'string', description: 'Key description', optional: true }, + created: { type: 'string', description: 'Creation timestamp' }, + expires: { type: 'string', description: 'Expiration timestamp' }, + revoked: { type: 'string', description: 'Revocation timestamp', optional: true }, + capabilities: { + type: 'object', + description: 'Key capabilities', + properties: { + reusable: { type: 'boolean', description: 'Whether the key is reusable' }, + ephemeral: { type: 'boolean', description: 'Whether devices are ephemeral' }, + preauthorized: { type: 'boolean', description: 'Whether devices are pre-authorized' }, + tags: { type: 'array', description: 'Tags applied to devices using this key' }, + }, + }, + }, +} diff --git a/apps/sim/tools/tailscale/get_device.ts b/apps/sim/tools/tailscale/get_device.ts new file mode 100644 index 00000000000..fe3ba670a7c --- /dev/null +++ b/apps/sim/tools/tailscale/get_device.ts @@ -0,0 +1,121 @@ +import type { ToolConfig } from '@/tools/types' +import type { 
TailscaleDeviceParams, TailscaleGetDeviceResponse } from './types' + +export const tailscaleGetDeviceTool: ToolConfig = + { + id: 'tailscale_get_device', + name: 'Tailscale Get Device', + description: 'Get details of a specific device by ID', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Tailscale API key', + }, + tailnet: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Tailnet name (e.g., example.com) or "-" for default', + }, + deviceId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Device ID', + }, + }, + + request: { + url: (params) => + `https://api.tailscale.com/api/v2/device/${encodeURIComponent(params.deviceId.trim())}`, + method: 'GET', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey.trim()}`, + }), + }, + + transformResponse: async (response) => { + if (!response.ok) { + const data = await response.json().catch(() => ({})) + return { + success: false, + output: { + id: '', + name: '', + hostname: '', + user: '', + os: '', + clientVersion: '', + addresses: [], + tags: [], + authorized: false, + blocksIncomingConnections: false, + lastSeen: '', + created: '', + isExternal: false, + updateAvailable: false, + machineKey: '', + nodeKey: '', + }, + error: (data as Record).message ?? 'Failed to get device', + } + } + + const data = await response.json() + return { + success: true, + output: { + id: data.id ?? null, + name: data.name ?? null, + hostname: data.hostname ?? null, + user: data.user ?? null, + os: data.os ?? null, + clientVersion: data.clientVersion ?? null, + addresses: data.addresses ?? [], + tags: data.tags ?? [], + authorized: data.authorized ?? false, + blocksIncomingConnections: data.blocksIncomingConnections ?? false, + lastSeen: data.lastSeen ?? null, + created: data.created ?? null, + isExternal: data.isExternal ?? false, + updateAvailable: data.updateAvailable ?? 
false, + machineKey: data.machineKey ?? null, + nodeKey: data.nodeKey ?? null, + }, + } + }, + + outputs: { + id: { type: 'string', description: 'Device ID' }, + name: { type: 'string', description: 'Device name' }, + hostname: { type: 'string', description: 'Device hostname' }, + user: { type: 'string', description: 'Associated user' }, + os: { type: 'string', description: 'Operating system' }, + clientVersion: { type: 'string', description: 'Tailscale client version' }, + addresses: { type: 'array', description: 'Tailscale IP addresses' }, + tags: { type: 'array', description: 'Device tags' }, + authorized: { type: 'boolean', description: 'Whether the device is authorized' }, + blocksIncomingConnections: { + type: 'boolean', + description: 'Whether the device blocks incoming connections', + }, + lastSeen: { type: 'string', description: 'Last seen timestamp' }, + created: { type: 'string', description: 'Creation timestamp' }, + isExternal: { + type: 'boolean', + description: 'Whether the device is external', + optional: true, + }, + updateAvailable: { + type: 'boolean', + description: 'Whether an update is available', + optional: true, + }, + machineKey: { type: 'string', description: 'Machine key', optional: true }, + nodeKey: { type: 'string', description: 'Node key', optional: true }, + }, + } diff --git a/apps/sim/tools/tailscale/get_device_routes.ts b/apps/sim/tools/tailscale/get_device_routes.ts new file mode 100644 index 00000000000..2d5b5407506 --- /dev/null +++ b/apps/sim/tools/tailscale/get_device_routes.ts @@ -0,0 +1,67 @@ +import type { ToolConfig } from '@/tools/types' +import type { TailscaleDeviceParams, TailscaleGetDeviceRoutesResponse } from './types' + +export const tailscaleGetDeviceRoutesTool: ToolConfig< + TailscaleDeviceParams, + TailscaleGetDeviceRoutesResponse +> = { + id: 'tailscale_get_device_routes', + name: 'Tailscale Get Device Routes', + description: 'Get the subnet routes for a device', + version: '1.0.0', + + params: { + apiKey: { + 
type: 'string', + required: true, + visibility: 'user-only', + description: 'Tailscale API key', + }, + tailnet: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Tailnet name (e.g., example.com) or "-" for default', + }, + deviceId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Device ID', + }, + }, + + request: { + url: (params) => + `https://api.tailscale.com/api/v2/device/${encodeURIComponent(params.deviceId.trim())}/routes`, + method: 'GET', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey.trim()}`, + }), + }, + + transformResponse: async (response) => { + if (!response.ok) { + const data = await response.json().catch(() => ({})) + return { + success: false, + output: { advertisedRoutes: [], enabledRoutes: [] }, + error: (data as Record).message ?? 'Failed to get device routes', + } + } + + const data = await response.json() + return { + success: true, + output: { + advertisedRoutes: data.advertisedRoutes ?? [], + enabledRoutes: data.enabledRoutes ?? 
[], + }, + } + }, + + outputs: { + advertisedRoutes: { type: 'array', description: 'Subnet routes the device is advertising' }, + enabledRoutes: { type: 'array', description: 'Subnet routes that are approved/enabled' }, + }, +} diff --git a/apps/sim/tools/tailscale/get_dns_preferences.ts b/apps/sim/tools/tailscale/get_dns_preferences.ts new file mode 100644 index 00000000000..248f962a19c --- /dev/null +++ b/apps/sim/tools/tailscale/get_dns_preferences.ts @@ -0,0 +1,65 @@ +import type { ToolConfig, ToolResponse } from '@/tools/types' +import type { TailscaleBaseParams } from './types' + +interface TailscaleGetDnsPreferencesResponse extends ToolResponse { + output: { + magicDNS: boolean + } +} + +export const tailscaleGetDnsPreferencesTool: ToolConfig< + TailscaleBaseParams, + TailscaleGetDnsPreferencesResponse +> = { + id: 'tailscale_get_dns_preferences', + name: 'Tailscale Get DNS Preferences', + description: 'Get the DNS preferences for the tailnet including MagicDNS status', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Tailscale API key', + }, + tailnet: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Tailnet name (e.g., example.com) or "-" for default', + }, + }, + + request: { + url: (params) => + `https://api.tailscale.com/api/v2/tailnet/${encodeURIComponent(params.tailnet.trim())}/dns/preferences`, + method: 'GET', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey.trim()}`, + }), + }, + + transformResponse: async (response) => { + if (!response.ok) { + const data = await response.json().catch(() => ({})) + return { + success: false, + output: { magicDNS: false }, + error: (data as Record).message ?? 'Failed to get DNS preferences', + } + } + + const data = await response.json() + return { + success: true, + output: { + magicDNS: data.magicDNS ?? 
false, + }, + } + }, + + outputs: { + magicDNS: { type: 'boolean', description: 'Whether MagicDNS is enabled' }, + }, +} diff --git a/apps/sim/tools/tailscale/get_dns_searchpaths.ts b/apps/sim/tools/tailscale/get_dns_searchpaths.ts new file mode 100644 index 00000000000..a5f8e54b026 --- /dev/null +++ b/apps/sim/tools/tailscale/get_dns_searchpaths.ts @@ -0,0 +1,65 @@ +import type { ToolConfig, ToolResponse } from '@/tools/types' +import type { TailscaleBaseParams } from './types' + +interface TailscaleGetDnsSearchpathsResponse extends ToolResponse { + output: { + searchPaths: string[] + } +} + +export const tailscaleGetDnsSearchpathsTool: ToolConfig< + TailscaleBaseParams, + TailscaleGetDnsSearchpathsResponse +> = { + id: 'tailscale_get_dns_searchpaths', + name: 'Tailscale Get DNS Search Paths', + description: 'Get the DNS search paths configured for the tailnet', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Tailscale API key', + }, + tailnet: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Tailnet name (e.g., example.com) or "-" for default', + }, + }, + + request: { + url: (params) => + `https://api.tailscale.com/api/v2/tailnet/${encodeURIComponent(params.tailnet.trim())}/dns/searchpaths`, + method: 'GET', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey.trim()}`, + }), + }, + + transformResponse: async (response) => { + if (!response.ok) { + const data = await response.json().catch(() => ({})) + return { + success: false, + output: { searchPaths: [] }, + error: (data as Record).message ?? 'Failed to get DNS search paths', + } + } + + const data = await response.json() + return { + success: true, + output: { + searchPaths: data.searchPaths ?? 
[], + }, + } + }, + + outputs: { + searchPaths: { type: 'array', description: 'List of DNS search path domains' }, + }, +} diff --git a/apps/sim/tools/tailscale/index.ts b/apps/sim/tools/tailscale/index.ts new file mode 100644 index 00000000000..b334bb12cfe --- /dev/null +++ b/apps/sim/tools/tailscale/index.ts @@ -0,0 +1,21 @@ +export { tailscaleAuthorizeDeviceTool } from './authorize_device' +export { tailscaleCreateAuthKeyTool } from './create_auth_key' +export { tailscaleDeleteAuthKeyTool } from './delete_auth_key' +export { tailscaleDeleteDeviceTool } from './delete_device' +export { tailscaleGetAclTool } from './get_acl' +export { tailscaleGetAuthKeyTool } from './get_auth_key' +export { tailscaleGetDeviceTool } from './get_device' +export { tailscaleGetDeviceRoutesTool } from './get_device_routes' +export { tailscaleGetDnsPreferencesTool } from './get_dns_preferences' +export { tailscaleGetDnsSearchpathsTool } from './get_dns_searchpaths' +export { tailscaleListAuthKeysTool } from './list_auth_keys' +export { tailscaleListDevicesTool } from './list_devices' +export { tailscaleListDnsNameserversTool } from './list_dns_nameservers' +export { tailscaleListUsersTool } from './list_users' +export { tailscaleSetDeviceRoutesTool } from './set_device_routes' +export { tailscaleSetDeviceTagsTool } from './set_device_tags' +export { tailscaleSetDnsNameserversTool } from './set_dns_nameservers' +export { tailscaleSetDnsPreferencesTool } from './set_dns_preferences' +export { tailscaleSetDnsSearchpathsTool } from './set_dns_searchpaths' +export * from './types' +export { tailscaleUpdateDeviceKeyTool } from './update_device_key' diff --git a/apps/sim/tools/tailscale/list_auth_keys.ts b/apps/sim/tools/tailscale/list_auth_keys.ts new file mode 100644 index 00000000000..2e94308e89f --- /dev/null +++ b/apps/sim/tools/tailscale/list_auth_keys.ts @@ -0,0 +1,126 @@ +import type { ToolConfig, ToolResponse } from '@/tools/types' +import type { TailscaleBaseParams } from './types' 
+ +interface TailscaleAuthKeyOutput { + id: string + description: string + created: string + expires: string + revoked: string + capabilities: { + reusable: boolean + ephemeral: boolean + preauthorized: boolean + tags: string[] + } +} + +interface TailscaleListAuthKeysResponse extends ToolResponse { + output: { + keys: TailscaleAuthKeyOutput[] + count: number + } +} + +export const tailscaleListAuthKeysTool: ToolConfig< + TailscaleBaseParams, + TailscaleListAuthKeysResponse +> = { + id: 'tailscale_list_auth_keys', + name: 'Tailscale List Auth Keys', + description: 'List all auth keys in the tailnet', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Tailscale API key', + }, + tailnet: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Tailnet name (e.g., example.com) or "-" for default', + }, + }, + + request: { + url: (params) => + `https://api.tailscale.com/api/v2/tailnet/${encodeURIComponent(params.tailnet.trim())}/keys`, + method: 'GET', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey.trim()}`, + }), + }, + + transformResponse: async (response) => { + if (!response.ok) { + const data = await response.json().catch(() => ({})) + return { + success: false, + output: { keys: [], count: 0 }, + error: (data as Record).message ?? 'Failed to list auth keys', + } + } + + const data = await response.json() + const keys = (data.keys ?? []).map((key: Record) => { + const caps = (key.capabilities as Record)?.devices as Record + const create = caps?.create as Record + return { + id: (key.id as string) ?? null, + description: (key.description as string) ?? null, + created: (key.created as string) ?? null, + expires: (key.expires as string) ?? null, + revoked: (key.revoked as string) ?? null, + capabilities: { + reusable: (create?.reusable as boolean) ?? false, + ephemeral: (create?.ephemeral as boolean) ?? 
false, + preauthorized: (create?.preauthorized as boolean) ?? false, + tags: (create?.tags as string[]) ?? [], + }, + } + }) + + return { + success: true, + output: { + keys, + count: keys.length, + }, + } + }, + + outputs: { + keys: { + type: 'array', + description: 'List of auth keys', + items: { + type: 'object', + properties: { + id: { type: 'string', description: 'Auth key ID' }, + description: { type: 'string', description: 'Key description' }, + created: { type: 'string', description: 'Creation timestamp' }, + expires: { type: 'string', description: 'Expiration timestamp' }, + revoked: { type: 'string', description: 'Revocation timestamp' }, + capabilities: { + type: 'object', + description: 'Key capabilities', + properties: { + reusable: { type: 'boolean', description: 'Whether the key is reusable' }, + ephemeral: { type: 'boolean', description: 'Whether devices are ephemeral' }, + preauthorized: { type: 'boolean', description: 'Whether devices are pre-authorized' }, + tags: { type: 'array', description: 'Tags applied to devices' }, + }, + }, + }, + }, + }, + count: { + type: 'number', + description: 'Total number of auth keys', + }, + }, +} diff --git a/apps/sim/tools/tailscale/list_devices.ts b/apps/sim/tools/tailscale/list_devices.ts new file mode 100644 index 00000000000..b55835d4828 --- /dev/null +++ b/apps/sim/tools/tailscale/list_devices.ts @@ -0,0 +1,102 @@ +import type { ToolConfig } from '@/tools/types' +import type { TailscaleBaseParams, TailscaleListDevicesResponse } from './types' + +export const tailscaleListDevicesTool: ToolConfig< + TailscaleBaseParams, + TailscaleListDevicesResponse +> = { + id: 'tailscale_list_devices', + name: 'Tailscale List Devices', + description: 'List all devices in the tailnet', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Tailscale API key', + }, + tailnet: { + type: 'string', + required: true, + visibility: 'user-or-llm', + 
description: 'Tailnet name (e.g., example.com) or "-" for default', + }, + }, + + request: { + url: (params) => + `https://api.tailscale.com/api/v2/tailnet/${encodeURIComponent(params.tailnet.trim())}/devices`, + method: 'GET', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey.trim()}`, + }), + }, + + transformResponse: async (response) => { + if (!response.ok) { + const data = await response.json().catch(() => ({})) + return { + success: false, + output: { devices: [], count: 0 }, + error: (data as Record).message ?? 'Failed to list devices', + } + } + + const data = await response.json() + const devices = (data.devices ?? []).map((device: Record) => ({ + id: (device.id as string) ?? null, + name: (device.name as string) ?? null, + hostname: (device.hostname as string) ?? null, + user: (device.user as string) ?? null, + os: (device.os as string) ?? null, + clientVersion: (device.clientVersion as string) ?? null, + addresses: (device.addresses as string[]) ?? [], + tags: (device.tags as string[]) ?? [], + authorized: (device.authorized as boolean) ?? false, + blocksIncomingConnections: (device.blocksIncomingConnections as boolean) ?? false, + lastSeen: (device.lastSeen as string) ?? null, + created: (device.created as string) ?? 
null, + })) + + return { + success: true, + output: { + devices, + count: devices.length, + }, + } + }, + + outputs: { + devices: { + type: 'array', + description: 'List of devices in the tailnet', + items: { + type: 'object', + properties: { + id: { type: 'string', description: 'Device ID' }, + name: { type: 'string', description: 'Device name' }, + hostname: { type: 'string', description: 'Device hostname' }, + user: { type: 'string', description: 'Associated user' }, + os: { type: 'string', description: 'Operating system' }, + clientVersion: { type: 'string', description: 'Tailscale client version' }, + addresses: { type: 'array', description: 'Tailscale IP addresses' }, + tags: { type: 'array', description: 'Device tags' }, + authorized: { type: 'boolean', description: 'Whether the device is authorized' }, + blocksIncomingConnections: { + type: 'boolean', + description: 'Whether the device blocks incoming connections', + }, + lastSeen: { type: 'string', description: 'Last seen timestamp' }, + created: { type: 'string', description: 'Creation timestamp' }, + }, + }, + }, + count: { + type: 'number', + description: 'Total number of devices', + }, + }, +} diff --git a/apps/sim/tools/tailscale/list_dns_nameservers.ts b/apps/sim/tools/tailscale/list_dns_nameservers.ts new file mode 100644 index 00000000000..67b0ac6745c --- /dev/null +++ b/apps/sim/tools/tailscale/list_dns_nameservers.ts @@ -0,0 +1,61 @@ +import type { ToolConfig } from '@/tools/types' +import type { TailscaleBaseParams, TailscaleListDnsNameserversResponse } from './types' + +export const tailscaleListDnsNameserversTool: ToolConfig< + TailscaleBaseParams, + TailscaleListDnsNameserversResponse +> = { + id: 'tailscale_list_dns_nameservers', + name: 'Tailscale List DNS Nameservers', + description: 'Get the DNS nameservers configured for the tailnet', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Tailscale API key', + }, + 
tailnet: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Tailnet name (e.g., example.com) or "-" for default', + }, + }, + + request: { + url: (params) => + `https://api.tailscale.com/api/v2/tailnet/${encodeURIComponent(params.tailnet.trim())}/dns/nameservers`, + method: 'GET', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey.trim()}`, + }), + }, + + transformResponse: async (response) => { + if (!response.ok) { + const data = await response.json().catch(() => ({})) + return { + success: false, + output: { dns: [], magicDNS: false }, + error: (data as Record).message ?? 'Failed to list DNS nameservers', + } + } + + const data = await response.json() + return { + success: true, + output: { + dns: data.dns ?? [], + magicDNS: data.magicDNS ?? false, + }, + } + }, + + outputs: { + dns: { type: 'array', description: 'List of DNS nameserver addresses' }, + magicDNS: { type: 'boolean', description: 'Whether MagicDNS is enabled' }, + }, +} diff --git a/apps/sim/tools/tailscale/list_users.ts b/apps/sim/tools/tailscale/list_users.ts new file mode 100644 index 00000000000..100719d637b --- /dev/null +++ b/apps/sim/tools/tailscale/list_users.ts @@ -0,0 +1,96 @@ +import type { ToolConfig } from '@/tools/types' +import type { TailscaleBaseParams, TailscaleListUsersResponse } from './types' + +export const tailscaleListUsersTool: ToolConfig = { + id: 'tailscale_list_users', + name: 'Tailscale List Users', + description: 'List all users in the tailnet', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Tailscale API key', + }, + tailnet: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Tailnet name (e.g., example.com) or "-" for default', + }, + }, + + request: { + url: (params) => + `https://api.tailscale.com/api/v2/tailnet/${encodeURIComponent(params.tailnet.trim())}/users`, + method: 'GET', + headers: (params) => ({ + 
Authorization: `Bearer ${params.apiKey.trim()}`, + }), + }, + + transformResponse: async (response) => { + if (!response.ok) { + const data = await response.json().catch(() => ({})) + return { + success: false, + output: { users: [], count: 0 }, + error: (data as Record).message ?? 'Failed to list users', + } + } + + const data = await response.json() + const users = (data.users ?? []).map((user: Record) => ({ + id: (user.id as string) ?? null, + displayName: (user.displayName as string) ?? null, + loginName: (user.loginName as string) ?? null, + profilePicURL: (user.profilePicURL as string) ?? null, + role: (user.role as string) ?? null, + status: (user.status as string) ?? null, + type: (user.type as string) ?? null, + created: (user.created as string) ?? null, + lastSeen: (user.lastSeen as string) ?? null, + deviceCount: (user.deviceCount as number) ?? 0, + })) + + return { + success: true, + output: { + users, + count: users.length, + }, + } + }, + + outputs: { + users: { + type: 'array', + description: 'List of users in the tailnet', + items: { + type: 'object', + properties: { + id: { type: 'string', description: 'User ID' }, + displayName: { type: 'string', description: 'Display name' }, + loginName: { type: 'string', description: 'Login name / email' }, + profilePicURL: { type: 'string', description: 'Profile picture URL', optional: true }, + role: { type: 'string', description: 'User role (owner, admin, member, etc.)' }, + status: { type: 'string', description: 'User status (active, suspended, etc.)' }, + type: { type: 'string', description: 'User type (member, shared, tagged)' }, + created: { type: 'string', description: 'Creation timestamp' }, + lastSeen: { type: 'string', description: 'Last seen timestamp', optional: true }, + deviceCount: { + type: 'number', + description: 'Number of devices owned by user', + optional: true, + }, + }, + }, + }, + count: { + type: 'number', + description: 'Total number of users', + }, + }, +} diff --git 
a/apps/sim/tools/tailscale/set_device_routes.ts b/apps/sim/tools/tailscale/set_device_routes.ts new file mode 100644 index 00000000000..49b3ba3ca31 --- /dev/null +++ b/apps/sim/tools/tailscale/set_device_routes.ts @@ -0,0 +1,81 @@ +import type { ToolConfig } from '@/tools/types' +import type { TailscaleSetDeviceRoutesParams, TailscaleSetDeviceRoutesResponse } from './types' + +export const tailscaleSetDeviceRoutesTool: ToolConfig< + TailscaleSetDeviceRoutesParams, + TailscaleSetDeviceRoutesResponse +> = { + id: 'tailscale_set_device_routes', + name: 'Tailscale Set Device Routes', + description: 'Set the enabled subnet routes for a device', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Tailscale API key', + }, + tailnet: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Tailnet name (e.g., example.com) or "-" for default', + }, + deviceId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Device ID', + }, + routes: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: + 'Comma-separated list of subnet routes to enable (e.g., "10.0.0.0/24,192.168.1.0/24")', + }, + }, + + request: { + url: (params) => + `https://api.tailscale.com/api/v2/device/${encodeURIComponent(params.deviceId.trim())}/routes`, + method: 'POST', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey.trim()}`, + 'Content-Type': 'application/json', + }), + body: (params) => ({ + routes: params.routes + .split(',') + .map((r) => r.trim()) + .filter(Boolean), + }), + }, + + transformResponse: async (response) => { + if (!response.ok) { + const data = await response.json().catch(() => ({})) + return { + success: false, + output: { advertisedRoutes: [], enabledRoutes: [] }, + error: (data as Record).message ?? 
'Failed to set device routes', + } + } + + const data = await response.json() + return { + success: true, + output: { + advertisedRoutes: data.advertisedRoutes ?? [], + enabledRoutes: data.enabledRoutes ?? [], + }, + } + }, + + outputs: { + advertisedRoutes: { type: 'array', description: 'Subnet routes the device is advertising' }, + enabledRoutes: { type: 'array', description: 'Subnet routes that are now enabled' }, + }, +} diff --git a/apps/sim/tools/tailscale/set_device_tags.ts b/apps/sim/tools/tailscale/set_device_tags.ts new file mode 100644 index 00000000000..b760a7b79ce --- /dev/null +++ b/apps/sim/tools/tailscale/set_device_tags.ts @@ -0,0 +1,88 @@ +import type { ToolConfig } from '@/tools/types' +import type { TailscaleSetDeviceTagsParams, TailscaleSetDeviceTagsResponse } from './types' + +export const tailscaleSetDeviceTagsTool: ToolConfig< + TailscaleSetDeviceTagsParams, + TailscaleSetDeviceTagsResponse +> = { + id: 'tailscale_set_device_tags', + name: 'Tailscale Set Device Tags', + description: 'Set tags on a device in the tailnet', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Tailscale API key', + }, + tailnet: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Tailnet name (e.g., example.com) or "-" for default', + }, + deviceId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Device ID', + }, + tags: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Comma-separated list of tags (e.g., "tag:server,tag:production")', + }, + }, + + request: { + url: (params) => + `https://api.tailscale.com/api/v2/device/${encodeURIComponent(params.deviceId.trim())}/tags`, + method: 'POST', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey.trim()}`, + 'Content-Type': 'application/json', + }), + body: (params) => ({ + tags: params.tags + .split(',') + .map((t) => t.trim()) + 
.filter(Boolean), + }), + }, + + transformResponse: async (response: Response, params?: TailscaleSetDeviceTagsParams) => { + if (!response.ok) { + const data = await response.json().catch(() => ({})) + return { + success: false, + output: { success: false, deviceId: '', tags: [] }, + error: (data as Record).message ?? 'Failed to set device tags', + } + } + + const tags = params?.tags + ? params.tags + .split(',') + .map((t) => t.trim()) + .filter(Boolean) + : [] + + return { + success: true, + output: { + success: true, + deviceId: params?.deviceId ?? '', + tags, + }, + } + }, + + outputs: { + success: { type: 'boolean', description: 'Whether the tags were successfully set' }, + deviceId: { type: 'string', description: 'Device ID' }, + tags: { type: 'array', description: 'Tags set on the device' }, + }, +} diff --git a/apps/sim/tools/tailscale/set_dns_nameservers.ts b/apps/sim/tools/tailscale/set_dns_nameservers.ts new file mode 100644 index 00000000000..52ecbe7ece3 --- /dev/null +++ b/apps/sim/tools/tailscale/set_dns_nameservers.ts @@ -0,0 +1,86 @@ +import type { ToolConfig, ToolResponse } from '@/tools/types' + +interface TailscaleSetDnsNameserversParams { + apiKey: string + tailnet: string + dns: string +} + +interface TailscaleSetDnsNameserversResponse extends ToolResponse { + output: { + dns: string[] + magicDNS: boolean + } +} + +export const tailscaleSetDnsNameserversTool: ToolConfig< + TailscaleSetDnsNameserversParams, + TailscaleSetDnsNameserversResponse +> = { + id: 'tailscale_set_dns_nameservers', + name: 'Tailscale Set DNS Nameservers', + description: 'Set the DNS nameservers for the tailnet', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Tailscale API key', + }, + tailnet: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Tailnet name (e.g., example.com) or "-" for default', + }, + dns: { + type: 'string', + required: true, + visibility: 
'user-or-llm', + description: 'Comma-separated list of DNS nameserver IP addresses (e.g., "8.8.8.8,8.8.4.4")', + }, + }, + + request: { + url: (params) => + `https://api.tailscale.com/api/v2/tailnet/${encodeURIComponent(params.tailnet.trim())}/dns/nameservers`, + method: 'POST', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey.trim()}`, + 'Content-Type': 'application/json', + }), + body: (params) => ({ + dns: params.dns + .split(',') + .map((s) => s.trim()) + .filter(Boolean), + }), + }, + + transformResponse: async (response) => { + if (!response.ok) { + const data = await response.json().catch(() => ({})) + return { + success: false, + output: { dns: [], magicDNS: false }, + error: (data as Record).message ?? 'Failed to set DNS nameservers', + } + } + + const data = await response.json() + return { + success: true, + output: { + dns: data.dns ?? [], + magicDNS: data.magicDNS ?? false, + }, + } + }, + + outputs: { + dns: { type: 'array', description: 'Updated list of DNS nameserver addresses' }, + magicDNS: { type: 'boolean', description: 'Whether MagicDNS is enabled' }, + }, +} diff --git a/apps/sim/tools/tailscale/set_dns_preferences.ts b/apps/sim/tools/tailscale/set_dns_preferences.ts new file mode 100644 index 00000000000..d39bafbb827 --- /dev/null +++ b/apps/sim/tools/tailscale/set_dns_preferences.ts @@ -0,0 +1,80 @@ +import type { ToolConfig, ToolResponse } from '@/tools/types' + +interface TailscaleSetDnsPreferencesParams { + apiKey: string + tailnet: string + magicDNS: boolean +} + +interface TailscaleSetDnsPreferencesResponse extends ToolResponse { + output: { + magicDNS: boolean + } +} + +export const tailscaleSetDnsPreferencesTool: ToolConfig< + TailscaleSetDnsPreferencesParams, + TailscaleSetDnsPreferencesResponse +> = { + id: 'tailscale_set_dns_preferences', + name: 'Tailscale Set DNS Preferences', + description: 'Set DNS preferences for the tailnet (enable/disable MagicDNS)', + version: '1.0.0', + + params: { + apiKey: { + type: 
'string', + required: true, + visibility: 'user-only', + description: 'Tailscale API key', + }, + tailnet: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Tailnet name (e.g., example.com) or "-" for default', + }, + magicDNS: { + type: 'boolean', + required: true, + visibility: 'user-or-llm', + description: 'Whether to enable (true) or disable (false) MagicDNS', + }, + }, + + request: { + url: (params) => + `https://api.tailscale.com/api/v2/tailnet/${encodeURIComponent(params.tailnet.trim())}/dns/preferences`, + method: 'POST', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey.trim()}`, + 'Content-Type': 'application/json', + }), + body: (params) => ({ + magicDNS: params.magicDNS, + }), + }, + + transformResponse: async (response) => { + if (!response.ok) { + const data = await response.json().catch(() => ({})) + return { + success: false, + output: { magicDNS: false }, + error: (data as Record).message ?? 'Failed to set DNS preferences', + } + } + + const data = await response.json() + return { + success: true, + output: { + magicDNS: data.magicDNS ?? 
false, + }, + } + }, + + outputs: { + magicDNS: { type: 'boolean', description: 'Updated MagicDNS status' }, + }, +} diff --git a/apps/sim/tools/tailscale/set_dns_searchpaths.ts b/apps/sim/tools/tailscale/set_dns_searchpaths.ts new file mode 100644 index 00000000000..31d52f0328c --- /dev/null +++ b/apps/sim/tools/tailscale/set_dns_searchpaths.ts @@ -0,0 +1,84 @@ +import type { ToolConfig, ToolResponse } from '@/tools/types' + +interface TailscaleSetDnsSearchpathsParams { + apiKey: string + tailnet: string + searchPaths: string +} + +interface TailscaleSetDnsSearchpathsResponse extends ToolResponse { + output: { + searchPaths: string[] + } +} + +export const tailscaleSetDnsSearchpathsTool: ToolConfig< + TailscaleSetDnsSearchpathsParams, + TailscaleSetDnsSearchpathsResponse +> = { + id: 'tailscale_set_dns_searchpaths', + name: 'Tailscale Set DNS Search Paths', + description: 'Set the DNS search paths for the tailnet', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Tailscale API key', + }, + tailnet: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Tailnet name (e.g., example.com) or "-" for default', + }, + searchPaths: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: + 'Comma-separated list of DNS search path domains (e.g., "corp.example.com,internal.example.com")', + }, + }, + + request: { + url: (params) => + `https://api.tailscale.com/api/v2/tailnet/${encodeURIComponent(params.tailnet.trim())}/dns/searchpaths`, + method: 'POST', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey.trim()}`, + 'Content-Type': 'application/json', + }), + body: (params) => ({ + searchPaths: params.searchPaths + .split(',') + .map((s) => s.trim()) + .filter(Boolean), + }), + }, + + transformResponse: async (response) => { + if (!response.ok) { + const data = await response.json().catch(() => ({})) + return { + success: false, + 
output: { searchPaths: [] }, + error: (data as Record).message ?? 'Failed to set DNS search paths', + } + } + + const data = await response.json() + return { + success: true, + output: { + searchPaths: data.searchPaths ?? [], + }, + } + }, + + outputs: { + searchPaths: { type: 'array', description: 'Updated list of DNS search path domains' }, + }, +} diff --git a/apps/sim/tools/tailscale/types.ts b/apps/sim/tools/tailscale/types.ts new file mode 100644 index 00000000000..80c8180bf91 --- /dev/null +++ b/apps/sim/tools/tailscale/types.ts @@ -0,0 +1,155 @@ +import type { ToolResponse } from '@/tools/types' + +export interface TailscaleBaseParams { + apiKey: string + tailnet: string +} + +export interface TailscaleDeviceParams extends TailscaleBaseParams { + deviceId: string +} + +export interface TailscaleSetDeviceTagsParams extends TailscaleDeviceParams { + tags: string +} + +export interface TailscaleAuthorizeDeviceParams extends TailscaleDeviceParams { + authorized: boolean +} + +export interface TailscaleSetDeviceRoutesParams extends TailscaleDeviceParams { + routes: string +} + +export interface TailscaleCreateAuthKeyParams extends TailscaleBaseParams { + reusable: boolean + ephemeral: boolean + preauthorized: boolean + tags?: string + description?: string + expirySeconds?: number +} + +export interface TailscaleDeviceOutput { + id: string + name: string + hostname: string + user: string + os: string + clientVersion: string + addresses: string[] + tags: string[] + authorized: boolean + blocksIncomingConnections: boolean + lastSeen: string + created: string +} + +export interface TailscaleUserOutput { + id: string + displayName: string + loginName: string + profilePicURL: string + role: string + status: string + type: string + created: string + lastSeen: string + deviceCount: number +} + +export interface TailscaleListDevicesResponse extends ToolResponse { + output: { + devices: TailscaleDeviceOutput[] + count: number + } +} + +export interface 
TailscaleGetDeviceResponse extends ToolResponse { + output: TailscaleDeviceOutput & { + isExternal: boolean + updateAvailable: boolean + machineKey: string + nodeKey: string + } +} + +export interface TailscaleUpdateDeviceKeyParams extends TailscaleDeviceParams { + keyExpiryDisabled: boolean +} + +export interface TailscaleUpdateDeviceKeyResponse extends ToolResponse { + output: { + success: boolean + deviceId: string + keyExpiryDisabled: boolean + } +} + +export interface TailscaleDeleteDeviceResponse extends ToolResponse { + output: { + success: boolean + deviceId: string + } +} + +export interface TailscaleAuthorizeDeviceResponse extends ToolResponse { + output: { + success: boolean + deviceId: string + authorized: boolean + } +} + +export interface TailscaleSetDeviceTagsResponse extends ToolResponse { + output: { + success: boolean + deviceId: string + tags: string[] + } +} + +export interface TailscaleGetDeviceRoutesResponse extends ToolResponse { + output: { + advertisedRoutes: string[] + enabledRoutes: string[] + } +} + +export interface TailscaleSetDeviceRoutesResponse extends ToolResponse { + output: { + advertisedRoutes: string[] + enabledRoutes: string[] + } +} + +export interface TailscaleListDnsNameserversResponse extends ToolResponse { + output: { + dns: string[] + magicDNS: boolean + } +} + +export interface TailscaleListUsersResponse extends ToolResponse { + output: { + users: TailscaleUserOutput[] + count: number + } +} + +export interface TailscaleCreateAuthKeyResponse extends ToolResponse { + output: { + id: string + key: string + description: string + created: string + expires: string + revoked: string + capabilities: { + reusable: boolean + ephemeral: boolean + preauthorized: boolean + tags: string[] + } + } +} diff --git a/apps/sim/tools/tailscale/update_device_key.ts b/apps/sim/tools/tailscale/update_device_key.ts new file mode 100644 index 00000000000..a26ff3b3ad4 --- /dev/null +++ b/apps/sim/tools/tailscale/update_device_key.ts @@ -0,0 
+1,78 @@ +import type { ToolConfig } from '@/tools/types' +import type { TailscaleUpdateDeviceKeyParams, TailscaleUpdateDeviceKeyResponse } from './types' + +export const tailscaleUpdateDeviceKeyTool: ToolConfig< + TailscaleUpdateDeviceKeyParams, + TailscaleUpdateDeviceKeyResponse +> = { + id: 'tailscale_update_device_key', + name: 'Tailscale Update Device Key', + description: 'Enable or disable key expiry on a device', + version: '1.0.0', + + params: { + apiKey: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Tailscale API key', + }, + tailnet: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Tailnet name (e.g., example.com) or "-" for default', + }, + deviceId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Device ID', + }, + keyExpiryDisabled: { + type: 'boolean', + required: true, + visibility: 'user-or-llm', + description: 'Whether to disable key expiry (true) or enable it (false)', + }, + }, + + request: { + url: (params) => + `https://api.tailscale.com/api/v2/device/${encodeURIComponent(params.deviceId.trim())}/key`, + method: 'POST', + headers: (params) => ({ + Authorization: `Bearer ${params.apiKey.trim()}`, + 'Content-Type': 'application/json', + }), + body: (params) => ({ + keyExpiryDisabled: params.keyExpiryDisabled, + }), + }, + + transformResponse: async (response: Response, params?: TailscaleUpdateDeviceKeyParams) => { + if (!response.ok) { + const data = await response.json().catch(() => ({})) + return { + success: false, + output: { success: false, deviceId: '', keyExpiryDisabled: false }, + error: (data as Record).message ?? 'Failed to update device key', + } + } + + return { + success: true, + output: { + success: true, + deviceId: params?.deviceId ?? '', + keyExpiryDisabled: params?.keyExpiryDisabled ?? 
true, + }, + } + }, + + outputs: { + success: { type: 'boolean', description: 'Whether the operation succeeded' }, + deviceId: { type: 'string', description: 'Device ID' }, + keyExpiryDisabled: { type: 'boolean', description: 'Whether key expiry is now disabled' }, + }, +} diff --git a/apps/sim/tools/types.ts b/apps/sim/tools/types.ts index 320caac26c3..7e6ed926c3e 100644 --- a/apps/sim/tools/types.ts +++ b/apps/sim/tools/types.ts @@ -6,6 +6,7 @@ export type BYOKProviderId = | 'anthropic' | 'google' | 'mistral' + | 'fireworks' | 'firecrawl' | 'exa' | 'serper' diff --git a/apps/sim/tools/utils.server.ts b/apps/sim/tools/utils.server.ts index dca9880c4ad..7abce99e125 100644 --- a/apps/sim/tools/utils.server.ts +++ b/apps/sim/tools/utils.server.ts @@ -1,14 +1,26 @@ import { createLogger } from '@sim/logger' +import { generateInternalToken } from '@/lib/auth/internal' import { secureFetchWithPinnedIP, validateUrlWithDNS, } from '@/lib/core/security/input-validation.server' +import { getInternalApiBaseUrl } from '@/lib/core/utils/urls' +import { isCustomTool } from '@/executor/constants' +import type { CustomToolDefinition } from '@/hooks/queries/custom-tools' import { extractErrorMessage } from '@/tools/error-extractors' +import { tools } from '@/tools/registry' import type { ToolConfig, ToolResponse } from '@/tools/types' import type { RequestParams } from '@/tools/utils' +import { createCustomToolRequestBody, createParamSchema, createToolConfig } from '@/tools/utils' const logger = createLogger('ToolsUtils') +export interface GetToolAsyncContext { + workflowId?: string + userId?: string + workspaceId?: string +} + /** * Execute the actual request and transform the response. * Server-only: uses DNS validation and IP-pinned fetch. 
@@ -75,3 +87,88 @@ export async function executeRequest( } } } + +// Get a tool by its ID asynchronously (supports server-side) +export async function getToolAsync( + toolId: string, + context: GetToolAsyncContext = {} +): Promise { + const builtInTool = tools[toolId] + if (builtInTool) return builtInTool + + if (isCustomTool(toolId)) { + return fetchCustomToolFromAPI(toolId, context) + } + + return undefined +} + +async function fetchCustomToolFromAPI( + customToolId: string, + context: GetToolAsyncContext +): Promise { + const { workflowId, userId, workspaceId } = context + const identifier = customToolId.replace('custom_', '') + + try { + const baseUrl = getInternalApiBaseUrl() + const url = new URL('/api/tools/custom', baseUrl) + + if (workflowId) { + url.searchParams.append('workflowId', workflowId) + } + if (userId) { + url.searchParams.append('userId', userId) + } + if (workspaceId) { + url.searchParams.append('workspaceId', workspaceId) + } + + const headers: Record = {} + + try { + const internalToken = await generateInternalToken(userId) + headers.Authorization = `Bearer ${internalToken}` + } catch (error) { + logger.warn('Failed to generate internal token for custom tools fetch', { error }) + } + + const response = await fetch(url.toString(), { headers }) + + if (!response.ok) { + await response.text().catch(() => {}) + logger.error(`Failed to fetch custom tools: ${response.statusText}`) + return undefined + } + + const result = await response.json() + + if (!result.data || !Array.isArray(result.data)) { + logger.error(`Invalid response when fetching custom tools: ${JSON.stringify(result)}`) + return undefined + } + + const customTool = result.data.find( + (tool: CustomToolDefinition) => tool.id === identifier || tool.title === identifier + ) as CustomToolDefinition | undefined + + if (!customTool) { + logger.error(`Custom tool not found: ${identifier}`) + return undefined + } + + const toolConfig = createToolConfig(customTool, customToolId) + + return { 
+ ...toolConfig, + params: createParamSchema(customTool), + request: { + ...toolConfig.request, + body: createCustomToolRequestBody(customTool, false, workflowId), + }, + } + } catch (error) { + logger.error(`Error fetching custom tool ${identifier} from API:`, error) + return undefined + } +} diff --git a/apps/sim/tools/utils.ts b/apps/sim/tools/utils.ts index 581d4a6ac58..2f944c18bd4 100644 --- a/apps/sim/tools/utils.ts +++ b/apps/sim/tools/utils.ts @@ -1,8 +1,6 @@ import { createLogger } from '@sim/logger' import { getMaxExecutionTimeout } from '@/lib/core/execution-limits' -import { getInternalApiBaseUrl } from '@/lib/core/utils/urls' -import { AGENT, isCustomTool } from '@/executor/constants' -import { getCustomTool } from '@/hooks/queries/custom-tools' +import type { CustomToolDefinition } from '@/hooks/queries/custom-tools' import { useEnvironmentStore } from '@/stores/settings/environment' import { tools } from '@/tools/registry' import type { ToolConfig } from '@/tools/types' @@ -286,48 +284,20 @@ export function createCustomToolRequestBody( } // Get a tool by its ID -export function getTool(toolId: string): ToolConfig | undefined { +export function getTool(toolId: string, _workspaceId?: string): ToolConfig | undefined { // Check for built-in tools const builtInTool = tools[toolId] if (builtInTool) return builtInTool - // Check if it's a custom tool - if (isCustomTool(toolId) && typeof window !== 'undefined') { - // Only try to use the sync version on the client - const identifier = toolId.slice(AGENT.CUSTOM_TOOL_PREFIX.length) - - // Try to find the tool from query cache (extracts workspaceId from URL) - const customTool = getCustomTool(identifier) - - if (customTool) { - return createToolConfig(customTool, toolId) - } - } - // If not found or running on the server, return undefined return undefined } -// Get a tool by its ID asynchronously (supports server-side) -export async function getToolAsync( - toolId: string, - workflowId?: string, - userId?: 
string -): Promise { - // Check for built-in tools - const builtInTool = tools[toolId] - if (builtInTool) return builtInTool - - // Check if it's a custom tool - if (isCustomTool(toolId)) { - return fetchCustomToolFromAPI(toolId, workflowId, userId) - } - - return undefined -} - // Helper function to create a tool config from a custom tool -function createToolConfig(customTool: any, customToolId: string): ToolConfig { +export function createToolConfig( + customTool: CustomToolDefinition, + customToolId: string +): ToolConfig { // Create a parameter schema from the custom tool schema const params = createParamSchema(customTool) @@ -363,102 +333,3 @@ function createToolConfig(customTool: any, customToolId: string): ToolConfig { }, } } - -// Create a tool config from a custom tool definition by fetching from API -async function fetchCustomToolFromAPI( - customToolId: string, - workflowId?: string, - userId?: string -): Promise { - const identifier = customToolId.replace('custom_', '') - - try { - const baseUrl = getInternalApiBaseUrl() - const url = new URL('/api/tools/custom', baseUrl) - - if (workflowId) { - url.searchParams.append('workflowId', workflowId) - } - if (userId) { - url.searchParams.append('userId', userId) - } - - // For server-side calls (during workflow execution), use internal JWT token - const headers: Record = {} - if (typeof window === 'undefined') { - try { - const { generateInternalToken } = await import('@/lib/auth/internal') - const internalToken = await generateInternalToken(userId) - headers.Authorization = `Bearer ${internalToken}` - } catch (error) { - logger.warn('Failed to generate internal token for custom tools fetch', { error }) - // Continue without token - will fail auth and be reported upstream - } - } - - const response = await fetch(url.toString(), { - headers, - }) - - if (!response.ok) { - await response.text().catch(() => {}) - logger.error(`Failed to fetch custom tools: ${response.statusText}`) - return undefined - } - - 
const result = await response.json() - - if (!result.data || !Array.isArray(result.data)) { - logger.error(`Invalid response when fetching custom tools: ${JSON.stringify(result)}`) - return undefined - } - - // Try to find the tool by ID or title - const customTool = result.data.find( - (tool: any) => tool.id === identifier || tool.title === identifier - ) - - if (!customTool) { - logger.error(`Custom tool not found: ${identifier}`) - return undefined - } - - // Create a parameter schema - const params = createParamSchema(customTool) - - // Create a tool config for the custom tool - return { - id: customToolId, - name: customTool.title, - description: customTool.schema.function?.description || '', - version: '1.0.0', - params, - - // Request configuration - for custom tools we'll use the execute endpoint - request: { - url: '/api/function/execute', - method: 'POST', - headers: () => ({ 'Content-Type': 'application/json' }), - body: createCustomToolRequestBody(customTool, false, workflowId), - }, - - // Same response handling as client-side - transformResponse: async (response: Response) => { - const data = await response.json() - - if (!data.success) { - throw new Error(data.error || 'Custom tool execution failed') - } - - return { - success: true, - output: data.output.result || data.output, - error: undefined, - } - }, - } - } catch (error) { - logger.error(`Error fetching custom tool ${identifier} from API:`, error) - return undefined - } -} diff --git a/apps/sim/triggers/attio/index.ts b/apps/sim/triggers/attio/index.ts index f8c0e27f294..a6e77f3a634 100644 --- a/apps/sim/triggers/attio/index.ts +++ b/apps/sim/triggers/attio/index.ts @@ -2,9 +2,12 @@ export { attioCommentCreatedTrigger } from './comment_created' export { attioCommentDeletedTrigger } from './comment_deleted' export { attioCommentResolvedTrigger } from './comment_resolved' export { attioCommentUnresolvedTrigger } from './comment_unresolved' +export { attioListCreatedTrigger } from 
'./list_created' +export { attioListDeletedTrigger } from './list_deleted' export { attioListEntryCreatedTrigger } from './list_entry_created' export { attioListEntryDeletedTrigger } from './list_entry_deleted' export { attioListEntryUpdatedTrigger } from './list_entry_updated' +export { attioListUpdatedTrigger } from './list_updated' export { attioNoteCreatedTrigger } from './note_created' export { attioNoteDeletedTrigger } from './note_deleted' export { attioNoteUpdatedTrigger } from './note_updated' @@ -16,3 +19,4 @@ export { attioTaskCreatedTrigger } from './task_created' export { attioTaskDeletedTrigger } from './task_deleted' export { attioTaskUpdatedTrigger } from './task_updated' export { attioWebhookTrigger } from './webhook' +export { attioWorkspaceMemberCreatedTrigger } from './workspace_member_created' diff --git a/apps/sim/triggers/attio/list_created.ts b/apps/sim/triggers/attio/list_created.ts new file mode 100644 index 00000000000..5f55567356f --- /dev/null +++ b/apps/sim/triggers/attio/list_created.ts @@ -0,0 +1,29 @@ +import { AttioIcon } from '@/components/icons' +import { buildAttioTriggerSubBlocks, buildListOutputs } from '@/triggers/attio/utils' +import type { TriggerConfig } from '@/triggers/types' + +/** + * Attio List Created Trigger + * + * Triggers when a list is created in Attio. 
+ */ +export const attioListCreatedTrigger: TriggerConfig = { + id: 'attio_list_created', + name: 'Attio List Created', + provider: 'attio', + description: 'Trigger workflow when a list is created in Attio', + version: '1.0.0', + icon: AttioIcon, + + subBlocks: buildAttioTriggerSubBlocks('attio_list_created'), + + outputs: buildListOutputs(), + + webhook: { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'Attio-Signature': 'hmac-sha256-signature', + }, + }, +} diff --git a/apps/sim/triggers/attio/list_deleted.ts b/apps/sim/triggers/attio/list_deleted.ts new file mode 100644 index 00000000000..9796cf257bb --- /dev/null +++ b/apps/sim/triggers/attio/list_deleted.ts @@ -0,0 +1,29 @@ +import { AttioIcon } from '@/components/icons' +import { buildAttioTriggerSubBlocks, buildListOutputs } from '@/triggers/attio/utils' +import type { TriggerConfig } from '@/triggers/types' + +/** + * Attio List Deleted Trigger + * + * Triggers when a list is deleted in Attio. + */ +export const attioListDeletedTrigger: TriggerConfig = { + id: 'attio_list_deleted', + name: 'Attio List Deleted', + provider: 'attio', + description: 'Trigger workflow when a list is deleted in Attio', + version: '1.0.0', + icon: AttioIcon, + + subBlocks: buildAttioTriggerSubBlocks('attio_list_deleted'), + + outputs: buildListOutputs(), + + webhook: { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'Attio-Signature': 'hmac-sha256-signature', + }, + }, +} diff --git a/apps/sim/triggers/attio/list_updated.ts b/apps/sim/triggers/attio/list_updated.ts new file mode 100644 index 00000000000..005cab5f6fe --- /dev/null +++ b/apps/sim/triggers/attio/list_updated.ts @@ -0,0 +1,29 @@ +import { AttioIcon } from '@/components/icons' +import { buildAttioTriggerSubBlocks, buildListOutputs } from '@/triggers/attio/utils' +import type { TriggerConfig } from '@/triggers/types' + +/** + * Attio List Updated Trigger + * + * Triggers when a list is updated in Attio. 
+ */ +export const attioListUpdatedTrigger: TriggerConfig = { + id: 'attio_list_updated', + name: 'Attio List Updated', + provider: 'attio', + description: 'Trigger workflow when a list is updated in Attio', + version: '1.0.0', + icon: AttioIcon, + + subBlocks: buildAttioTriggerSubBlocks('attio_list_updated'), + + outputs: buildListOutputs(), + + webhook: { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'Attio-Signature': 'hmac-sha256-signature', + }, + }, +} diff --git a/apps/sim/triggers/attio/utils.ts b/apps/sim/triggers/attio/utils.ts index 11dfa89c309..9276f91ad46 100644 --- a/apps/sim/triggers/attio/utils.ts +++ b/apps/sim/triggers/attio/utils.ts @@ -19,6 +19,10 @@ export const attioTriggerOptions = [ { label: 'List Entry Created', id: 'attio_list_entry_created' }, { label: 'List Entry Updated', id: 'attio_list_entry_updated' }, { label: 'List Entry Deleted', id: 'attio_list_entry_deleted' }, + { label: 'List Created', id: 'attio_list_created' }, + { label: 'List Updated', id: 'attio_list_updated' }, + { label: 'List Deleted', id: 'attio_list_deleted' }, + { label: 'Workspace Member Created', id: 'attio_workspace_member_created' }, { label: 'Generic Webhook (All Events)', id: 'attio_webhook' }, ] @@ -235,6 +239,42 @@ export function buildCommentOutputs(): Record { } } +/** + * List event outputs. + */ +function buildListIdOutputs(): Record { + return { + workspaceId: { type: 'string', description: 'The workspace ID' }, + listId: { type: 'string', description: 'The list ID' }, + } +} + +/** List created/updated/deleted outputs. */ +export function buildListOutputs(): Record { + return { + ...buildBaseWebhookOutputs(), + ...buildListIdOutputs(), + } +} + +/** + * Workspace member event outputs. 
+ */ +function buildWorkspaceMemberIdOutputs(): Record { + return { + workspaceId: { type: 'string', description: 'The workspace ID' }, + workspaceMemberId: { type: 'string', description: 'The workspace member ID' }, + } +} + +/** Workspace member outputs. */ +export function buildWorkspaceMemberOutputs(): Record { + return { + ...buildBaseWebhookOutputs(), + ...buildWorkspaceMemberIdOutputs(), + } +} + /** List entry created/deleted outputs. */ export function buildListEntryOutputs(): Record { return { @@ -276,7 +316,7 @@ export const TRIGGER_EVENT_MAP: Record = { attio_record_deleted: ['record.deleted'], attio_record_merged: ['record.merged'], attio_note_created: ['note.created'], - attio_note_updated: ['note.updated', 'note.content-updated'], + attio_note_updated: ['note.updated', 'note-content.updated'], attio_note_deleted: ['note.deleted'], attio_task_created: ['task.created'], attio_task_updated: ['task.updated'], @@ -288,6 +328,10 @@ export const TRIGGER_EVENT_MAP: Record = { attio_list_entry_created: ['list-entry.created'], attio_list_entry_updated: ['list-entry.updated'], attio_list_entry_deleted: ['list-entry.deleted'], + attio_list_created: ['list.created'], + attio_list_updated: ['list.updated'], + attio_list_deleted: ['list.deleted'], + attio_workspace_member_created: ['workspace-member.created'], } /** @@ -445,6 +489,35 @@ export function extractAttioListEntryUpdatedData( } } +/** + * Extracts formatted data from an Attio list event payload. + * Used for list.created, list.updated, list.deleted triggers. + */ +export function extractAttioListData(body: Record): Record { + const event = getAttioEvent(body) ?? {} + const id = (event.id as Record) ?? {} + return { + eventType: event.event_type ?? null, + workspaceId: id.workspace_id ?? null, + listId: id.list_id ?? null, + } +} + +/** + * Extracts formatted data from an Attio workspace-member.created event payload. 
+ */ +export function extractAttioWorkspaceMemberData( + body: Record +): Record { + const event = getAttioEvent(body) ?? {} + const id = (event.id as Record) ?? {} + return { + eventType: event.event_type ?? null, + workspaceId: id.workspace_id ?? null, + workspaceMemberId: id.workspace_member_id ?? null, + } +} + /** * Extracts formatted data from a generic Attio webhook payload. * Passes through the first event with camelCase field mapping. diff --git a/apps/sim/triggers/attio/workspace_member_created.ts b/apps/sim/triggers/attio/workspace_member_created.ts new file mode 100644 index 00000000000..6d6d8145e62 --- /dev/null +++ b/apps/sim/triggers/attio/workspace_member_created.ts @@ -0,0 +1,29 @@ +import { AttioIcon } from '@/components/icons' +import { buildAttioTriggerSubBlocks, buildWorkspaceMemberOutputs } from '@/triggers/attio/utils' +import type { TriggerConfig } from '@/triggers/types' + +/** + * Attio Workspace Member Created Trigger + * + * Triggers when a new member is added to the Attio workspace. 
+ */ +export const attioWorkspaceMemberCreatedTrigger: TriggerConfig = { + id: 'attio_workspace_member_created', + name: 'Attio Workspace Member Created', + provider: 'attio', + description: 'Trigger workflow when a new member is added to the Attio workspace', + version: '1.0.0', + icon: AttioIcon, + + subBlocks: buildAttioTriggerSubBlocks('attio_workspace_member_created'), + + outputs: buildWorkspaceMemberOutputs(), + + webhook: { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'Attio-Signature': 'hmac-sha256-signature', + }, + }, +} diff --git a/apps/sim/triggers/registry.ts b/apps/sim/triggers/registry.ts index b0c61fe8c1c..4390bfeefff 100644 --- a/apps/sim/triggers/registry.ts +++ b/apps/sim/triggers/registry.ts @@ -12,9 +12,12 @@ import { attioCommentDeletedTrigger, attioCommentResolvedTrigger, attioCommentUnresolvedTrigger, + attioListCreatedTrigger, + attioListDeletedTrigger, attioListEntryCreatedTrigger, attioListEntryDeletedTrigger, attioListEntryUpdatedTrigger, + attioListUpdatedTrigger, attioNoteCreatedTrigger, attioNoteDeletedTrigger, attioNoteUpdatedTrigger, @@ -26,6 +29,7 @@ import { attioTaskDeletedTrigger, attioTaskUpdatedTrigger, attioWebhookTrigger, + attioWorkspaceMemberCreatedTrigger, } from '@/triggers/attio' import { calcomBookingCancelledTrigger, @@ -200,6 +204,10 @@ export const TRIGGER_REGISTRY: TriggerRegistry = { attio_list_entry_created: attioListEntryCreatedTrigger, attio_list_entry_updated: attioListEntryUpdatedTrigger, attio_list_entry_deleted: attioListEntryDeletedTrigger, + attio_list_created: attioListCreatedTrigger, + attio_list_updated: attioListUpdatedTrigger, + attio_list_deleted: attioListDeletedTrigger, + attio_workspace_member_created: attioWorkspaceMemberCreatedTrigger, calendly_webhook: calendlyWebhookTrigger, calendly_invitee_created: calendlyInviteeCreatedTrigger, calendly_invitee_canceled: calendlyInviteeCanceledTrigger, diff --git a/bun.lock b/bun.lock index c2663c97fd3..181248e1ee7 100644 --- 
a/bun.lock +++ b/bun.lock @@ -60,6 +60,7 @@ "@aws-sdk/client-dynamodb": "3.940.0", "@aws-sdk/client-rds-data": "3.940.0", "@aws-sdk/client-s3": "^3.779.0", + "@aws-sdk/client-secrets-manager": "3.1021.0", "@aws-sdk/client-sqs": "3.947.0", "@aws-sdk/lib-dynamodb": "3.940.0", "@aws-sdk/s3-request-presigner": "^3.779.0", @@ -419,6 +420,8 @@ "@aws-sdk/client-s3": ["@aws-sdk/client-s3@3.1015.0", "", { "dependencies": { "@aws-crypto/sha1-browser": "5.2.0", "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.973.24", "@aws-sdk/credential-provider-node": "^3.972.25", "@aws-sdk/middleware-bucket-endpoint": "^3.972.8", "@aws-sdk/middleware-expect-continue": "^3.972.8", "@aws-sdk/middleware-flexible-checksums": "^3.974.4", "@aws-sdk/middleware-host-header": "^3.972.8", "@aws-sdk/middleware-location-constraint": "^3.972.8", "@aws-sdk/middleware-logger": "^3.972.8", "@aws-sdk/middleware-recursion-detection": "^3.972.8", "@aws-sdk/middleware-sdk-s3": "^3.972.24", "@aws-sdk/middleware-ssec": "^3.972.8", "@aws-sdk/middleware-user-agent": "^3.972.25", "@aws-sdk/region-config-resolver": "^3.972.9", "@aws-sdk/signature-v4-multi-region": "^3.996.12", "@aws-sdk/types": "^3.973.6", "@aws-sdk/util-endpoints": "^3.996.5", "@aws-sdk/util-user-agent-browser": "^3.972.8", "@aws-sdk/util-user-agent-node": "^3.973.11", "@smithy/config-resolver": "^4.4.13", "@smithy/core": "^3.23.12", "@smithy/eventstream-serde-browser": "^4.2.12", "@smithy/eventstream-serde-config-resolver": "^4.3.12", "@smithy/eventstream-serde-node": "^4.2.12", "@smithy/fetch-http-handler": "^5.3.15", "@smithy/hash-blob-browser": "^4.2.13", "@smithy/hash-node": "^4.2.12", "@smithy/hash-stream-node": "^4.2.12", "@smithy/invalid-dependency": "^4.2.12", "@smithy/md5-js": "^4.2.12", "@smithy/middleware-content-length": "^4.2.12", "@smithy/middleware-endpoint": "^4.4.27", "@smithy/middleware-retry": "^4.4.44", "@smithy/middleware-serde": "^4.2.15", "@smithy/middleware-stack": "^4.2.12", 
"@smithy/node-config-provider": "^4.3.12", "@smithy/node-http-handler": "^4.5.0", "@smithy/protocol-http": "^5.3.12", "@smithy/smithy-client": "^4.12.7", "@smithy/types": "^4.13.1", "@smithy/url-parser": "^4.2.12", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.43", "@smithy/util-defaults-mode-node": "^4.2.47", "@smithy/util-endpoints": "^3.3.3", "@smithy/util-middleware": "^4.2.12", "@smithy/util-retry": "^4.2.12", "@smithy/util-stream": "^4.5.20", "@smithy/util-utf8": "^4.2.2", "@smithy/util-waiter": "^4.2.13", "tslib": "^2.6.2" } }, "sha512-yo+Y+/fq5/E684SynTRO+VA3a+98MeE/hs7J52XpNI5SchOCSrLhLtcDKVASlGhHQdNLGLzblRgps1OZaf8sbA=="], + "@aws-sdk/client-secrets-manager": ["@aws-sdk/client-secrets-manager@3.1021.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.973.26", "@aws-sdk/credential-provider-node": "^3.972.29", "@aws-sdk/middleware-host-header": "^3.972.8", "@aws-sdk/middleware-logger": "^3.972.8", "@aws-sdk/middleware-recursion-detection": "^3.972.9", "@aws-sdk/middleware-user-agent": "^3.972.28", "@aws-sdk/region-config-resolver": "^3.972.10", "@aws-sdk/types": "^3.973.6", "@aws-sdk/util-endpoints": "^3.996.5", "@aws-sdk/util-user-agent-browser": "^3.972.8", "@aws-sdk/util-user-agent-node": "^3.973.14", "@smithy/config-resolver": "^4.4.13", "@smithy/core": "^3.23.13", "@smithy/fetch-http-handler": "^5.3.15", "@smithy/hash-node": "^4.2.12", "@smithy/invalid-dependency": "^4.2.12", "@smithy/middleware-content-length": "^4.2.12", "@smithy/middleware-endpoint": "^4.4.28", "@smithy/middleware-retry": "^4.4.46", "@smithy/middleware-serde": "^4.2.16", "@smithy/middleware-stack": "^4.2.12", "@smithy/node-config-provider": "^4.3.12", "@smithy/node-http-handler": "^4.5.1", "@smithy/protocol-http": "^5.3.12", "@smithy/smithy-client": "^4.12.8", "@smithy/types": "^4.13.1", 
"@smithy/url-parser": "^4.2.12", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.44", "@smithy/util-defaults-mode-node": "^4.2.48", "@smithy/util-endpoints": "^3.3.3", "@smithy/util-middleware": "^4.2.12", "@smithy/util-retry": "^4.2.13", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-Z2z4eEuXDBiLXwu51icmP7GYIXHoQ4KRQaNESquKa6n57rWnQ6kD6ZhsbQow/39gHvbU9uA6t+aHeTdYxw0JbQ=="], + "@aws-sdk/client-sesv2": ["@aws-sdk/client-sesv2@3.1015.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.973.24", "@aws-sdk/credential-provider-node": "^3.972.25", "@aws-sdk/middleware-host-header": "^3.972.8", "@aws-sdk/middleware-logger": "^3.972.8", "@aws-sdk/middleware-recursion-detection": "^3.972.8", "@aws-sdk/middleware-user-agent": "^3.972.25", "@aws-sdk/region-config-resolver": "^3.972.9", "@aws-sdk/signature-v4-multi-region": "^3.996.12", "@aws-sdk/types": "^3.973.6", "@aws-sdk/util-endpoints": "^3.996.5", "@aws-sdk/util-user-agent-browser": "^3.972.8", "@aws-sdk/util-user-agent-node": "^3.973.11", "@smithy/config-resolver": "^4.4.13", "@smithy/core": "^3.23.12", "@smithy/fetch-http-handler": "^5.3.15", "@smithy/hash-node": "^4.2.12", "@smithy/invalid-dependency": "^4.2.12", "@smithy/middleware-content-length": "^4.2.12", "@smithy/middleware-endpoint": "^4.4.27", "@smithy/middleware-retry": "^4.4.44", "@smithy/middleware-serde": "^4.2.15", "@smithy/middleware-stack": "^4.2.12", "@smithy/node-config-provider": "^4.3.12", "@smithy/node-http-handler": "^4.5.0", "@smithy/protocol-http": "^5.3.12", "@smithy/smithy-client": "^4.12.7", "@smithy/types": "^4.13.1", "@smithy/url-parser": "^4.2.12", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.43", 
"@smithy/util-defaults-mode-node": "^4.2.47", "@smithy/util-endpoints": "^3.3.3", "@smithy/util-middleware": "^4.2.12", "@smithy/util-retry": "^4.2.12", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-FoIU3k4Z3ccEEgZFRwLHa84erkMk25r17q7UfsNLrpP/ef3EzNYss+QM15imeS8ekbH8+P/5T6/5/9sUO3L6kA=="], "@aws-sdk/client-sqs": ["@aws-sdk/client-sqs@3.947.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.947.0", "@aws-sdk/credential-provider-node": "3.947.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", "@aws-sdk/middleware-recursion-detection": "3.936.0", "@aws-sdk/middleware-sdk-sqs": "3.946.0", "@aws-sdk/middleware-user-agent": "3.947.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@aws-sdk/util-user-agent-browser": "3.936.0", "@aws-sdk/util-user-agent-node": "3.947.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.7", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/md5-js": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.14", "@smithy/middleware-retry": "^4.4.14", "@smithy/middleware-serde": "^4.2.6", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.10", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.13", "@smithy/util-defaults-mode-node": "^4.2.16", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, 
"sha512-8tzFyYGAAnQg+G9eB5zAe0oEo+MJMZ3YEk+8EL4uf2zG5wKxJvTBJZr6U9I1CEXYUde374OyLMyKng+sWyN+wg=="], @@ -3863,6 +3866,46 @@ "@aws-sdk/client-s3/@aws-sdk/util-user-agent-node": ["@aws-sdk/util-user-agent-node@3.973.11", "", { "dependencies": { "@aws-sdk/middleware-user-agent": "^3.972.25", "@aws-sdk/types": "^3.973.6", "@smithy/node-config-provider": "^4.3.12", "@smithy/types": "^4.13.1", "@smithy/util-config-provider": "^4.2.2", "tslib": "^2.6.2" }, "peerDependencies": { "aws-crt": ">=1.0.0" }, "optionalPeers": ["aws-crt"] }, "sha512-1qdXbXo2s5MMLpUvw00284LsbhtlQ4ul7Zzdn5n+7p4WVgCMLqhxImpHIrjSoc72E/fyc4Wq8dLtUld2Gsh+lA=="], + "@aws-sdk/client-secrets-manager/@aws-sdk/core": ["@aws-sdk/core@3.973.26", "", { "dependencies": { "@aws-sdk/types": "^3.973.6", "@aws-sdk/xml-builder": "^3.972.16", "@smithy/core": "^3.23.13", "@smithy/node-config-provider": "^4.3.12", "@smithy/property-provider": "^4.2.12", "@smithy/protocol-http": "^5.3.12", "@smithy/signature-v4": "^5.3.12", "@smithy/smithy-client": "^4.12.8", "@smithy/types": "^4.13.1", "@smithy/util-base64": "^4.3.2", "@smithy/util-middleware": "^4.2.12", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-A/E6n2W42ruU+sfWk+mMUOyVXbsSgGrY3MJ9/0Az5qUdG67y8I6HYzzoAa+e/lzxxl1uCYmEL6BTMi9ZiZnplQ=="], + + "@aws-sdk/client-secrets-manager/@aws-sdk/credential-provider-node": ["@aws-sdk/credential-provider-node@3.972.29", "", { "dependencies": { "@aws-sdk/credential-provider-env": "^3.972.24", "@aws-sdk/credential-provider-http": "^3.972.26", "@aws-sdk/credential-provider-ini": "^3.972.28", "@aws-sdk/credential-provider-process": "^3.972.24", "@aws-sdk/credential-provider-sso": "^3.972.28", "@aws-sdk/credential-provider-web-identity": "^3.972.28", "@aws-sdk/types": "^3.973.6", "@smithy/credential-provider-imds": "^4.2.12", "@smithy/property-provider": "^4.2.12", "@smithy/shared-ini-file-loader": "^4.4.7", "@smithy/types": "^4.13.1", "tslib": "^2.6.2" } }, 
"sha512-clSzDcvndpFJAggLDnDb36sPdlZYyEs5Zm6zgZjjUhwsJgSWiWKwFIXUVBcbruidNyBdbpOv2tNDL9sX8y3/0g=="], + + "@aws-sdk/client-secrets-manager/@aws-sdk/middleware-host-header": ["@aws-sdk/middleware-host-header@3.972.8", "", { "dependencies": { "@aws-sdk/types": "^3.973.6", "@smithy/protocol-http": "^5.3.12", "@smithy/types": "^4.13.1", "tslib": "^2.6.2" } }, "sha512-wAr2REfKsqoKQ+OkNqvOShnBoh+nkPurDKW7uAeVSu6kUECnWlSJiPvnoqxGlfousEY/v9LfS9sNc46hjSYDIQ=="], + + "@aws-sdk/client-secrets-manager/@aws-sdk/middleware-logger": ["@aws-sdk/middleware-logger@3.972.8", "", { "dependencies": { "@aws-sdk/types": "^3.973.6", "@smithy/types": "^4.13.1", "tslib": "^2.6.2" } }, "sha512-CWl5UCM57WUFaFi5kB7IBY1UmOeLvNZAZ2/OZ5l20ldiJ3TiIz1pC65gYj8X0BCPWkeR1E32mpsCk1L1I4n+lA=="], + + "@aws-sdk/client-secrets-manager/@aws-sdk/middleware-recursion-detection": ["@aws-sdk/middleware-recursion-detection@3.972.9", "", { "dependencies": { "@aws-sdk/types": "^3.973.6", "@aws/lambda-invoke-store": "^0.2.2", "@smithy/protocol-http": "^5.3.12", "@smithy/types": "^4.13.1", "tslib": "^2.6.2" } }, "sha512-/Wt5+CT8dpTFQxEJ9iGy/UGrXr7p2wlIOEHvIr/YcHYByzoLjrqkYqXdJjd9UIgWjv7eqV2HnFJen93UTuwfTQ=="], + + "@aws-sdk/client-secrets-manager/@aws-sdk/middleware-user-agent": ["@aws-sdk/middleware-user-agent@3.972.28", "", { "dependencies": { "@aws-sdk/core": "^3.973.26", "@aws-sdk/types": "^3.973.6", "@aws-sdk/util-endpoints": "^3.996.5", "@smithy/core": "^3.23.13", "@smithy/protocol-http": "^5.3.12", "@smithy/types": "^4.13.1", "@smithy/util-retry": "^4.2.13", "tslib": "^2.6.2" } }, "sha512-cfWZFlVh7Va9lRay4PN2A9ARFzaBYcA097InT5M2CdRS05ECF5yaz86jET8Wsl2WcyKYEvVr/QNmKtYtafUHtQ=="], + + "@aws-sdk/client-secrets-manager/@aws-sdk/region-config-resolver": ["@aws-sdk/region-config-resolver@3.972.10", "", { "dependencies": { "@aws-sdk/types": "^3.973.6", "@smithy/config-resolver": "^4.4.13", "@smithy/node-config-provider": "^4.3.12", "@smithy/types": "^4.13.1", "tslib": "^2.6.2" } }, 
"sha512-1dq9ToC6e070QvnVhhbAs3bb5r6cQ10gTVc6cyRV5uvQe7P138TV2uG2i6+Yok4bAkVAcx5AqkTEBUvWEtBlsQ=="], + + "@aws-sdk/client-secrets-manager/@aws-sdk/types": ["@aws-sdk/types@3.973.6", "", { "dependencies": { "@smithy/types": "^4.13.1", "tslib": "^2.6.2" } }, "sha512-Atfcy4E++beKtwJHiDln2Nby8W/mam64opFPTiHEqgsthqeydFS1pY+OUlN1ouNOmf8ArPU/6cDS65anOP3KQw=="], + + "@aws-sdk/client-secrets-manager/@aws-sdk/util-endpoints": ["@aws-sdk/util-endpoints@3.996.5", "", { "dependencies": { "@aws-sdk/types": "^3.973.6", "@smithy/types": "^4.13.1", "@smithy/url-parser": "^4.2.12", "@smithy/util-endpoints": "^3.3.3", "tslib": "^2.6.2" } }, "sha512-Uh93L5sXFNbyR5sEPMzUU8tJ++Ku97EY4udmC01nB8Zu+xfBPwpIwJ6F7snqQeq8h2pf+8SGN5/NoytfKgYPIw=="], + + "@aws-sdk/client-secrets-manager/@aws-sdk/util-user-agent-browser": ["@aws-sdk/util-user-agent-browser@3.972.8", "", { "dependencies": { "@aws-sdk/types": "^3.973.6", "@smithy/types": "^4.13.1", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "sha512-B3KGXJviV2u6Cdw2SDY2aDhoJkVfY/Q/Trwk2CMSkikE1Oi6gRzxhvhIfiRpHfmIsAhV4EA54TVEX8K6CbHbkA=="], + + "@aws-sdk/client-secrets-manager/@aws-sdk/util-user-agent-node": ["@aws-sdk/util-user-agent-node@3.973.14", "", { "dependencies": { "@aws-sdk/middleware-user-agent": "^3.972.28", "@aws-sdk/types": "^3.973.6", "@smithy/node-config-provider": "^4.3.12", "@smithy/types": "^4.13.1", "@smithy/util-config-provider": "^4.2.2", "tslib": "^2.6.2" }, "peerDependencies": { "aws-crt": ">=1.0.0" }, "optionalPeers": ["aws-crt"] }, "sha512-vNSB/DYaPOyujVZBg/zUznH9QC142MaTHVmaFlF7uzzfg3CgT9f/l4C0Yi+vU/tbBhxVcXVB90Oohk5+o+ZbWw=="], + + "@aws-sdk/client-secrets-manager/@smithy/core": ["@smithy/core@3.23.13", "", { "dependencies": { "@smithy/protocol-http": "^5.3.12", "@smithy/types": "^4.13.1", "@smithy/url-parser": "^4.2.12", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-middleware": "^4.2.12", "@smithy/util-stream": "^4.5.21", "@smithy/util-utf8": "^4.2.2", "@smithy/uuid": 
"^1.1.2", "tslib": "^2.6.2" } }, "sha512-J+2TT9D6oGsUVXVEMvz8h2EmdVnkBiy2auCie4aSJMvKlzUtO5hqjEzXhoCUkIMo7gAYjbQcN0g/MMSXEhDs1Q=="], + + "@aws-sdk/client-secrets-manager/@smithy/middleware-endpoint": ["@smithy/middleware-endpoint@4.4.28", "", { "dependencies": { "@smithy/core": "^3.23.13", "@smithy/middleware-serde": "^4.2.16", "@smithy/node-config-provider": "^4.3.12", "@smithy/shared-ini-file-loader": "^4.4.7", "@smithy/types": "^4.13.1", "@smithy/url-parser": "^4.2.12", "@smithy/util-middleware": "^4.2.12", "tslib": "^2.6.2" } }, "sha512-p1gfYpi91CHcs5cBq982UlGlDrxoYUX6XdHSo91cQ2KFuz6QloHosO7Jc60pJiVmkWrKOV8kFYlGFFbQ2WUKKQ=="], + + "@aws-sdk/client-secrets-manager/@smithy/middleware-retry": ["@smithy/middleware-retry@4.4.46", "", { "dependencies": { "@smithy/node-config-provider": "^4.3.12", "@smithy/protocol-http": "^5.3.12", "@smithy/service-error-classification": "^4.2.12", "@smithy/smithy-client": "^4.12.8", "@smithy/types": "^4.13.1", "@smithy/util-middleware": "^4.2.12", "@smithy/util-retry": "^4.2.13", "@smithy/uuid": "^1.1.2", "tslib": "^2.6.2" } }, "sha512-SpvWNNOPOrKQGUqZbEPO+es+FRXMWvIyzUKUOYdDgdlA6BdZj/R58p4umoQ76c2oJC44PiM7mKizyyex1IJzow=="], + + "@aws-sdk/client-secrets-manager/@smithy/middleware-serde": ["@smithy/middleware-serde@4.2.16", "", { "dependencies": { "@smithy/core": "^3.23.13", "@smithy/protocol-http": "^5.3.12", "@smithy/types": "^4.13.1", "tslib": "^2.6.2" } }, "sha512-beqfV+RZ9RSv+sQqor3xroUUYgRFCGRw6niGstPG8zO9LgTl0B0MCucxjmrH/2WwksQN7UUgI7KNANoZv+KALA=="], + + "@aws-sdk/client-secrets-manager/@smithy/node-http-handler": ["@smithy/node-http-handler@4.5.1", "", { "dependencies": { "@smithy/protocol-http": "^5.3.12", "@smithy/querystring-builder": "^4.2.12", "@smithy/types": "^4.13.1", "tslib": "^2.6.2" } }, "sha512-ejjxdAXjkPIs9lyYyVutOGNOraqUE9v/NjGMKwwFrfOM354wfSD8lmlj8hVwUzQmlLLF4+udhfCX9Exnbmvfzw=="], + + "@aws-sdk/client-secrets-manager/@smithy/smithy-client": ["@smithy/smithy-client@4.12.8", "", { "dependencies": { 
"@smithy/core": "^3.23.13", "@smithy/middleware-endpoint": "^4.4.28", "@smithy/middleware-stack": "^4.2.12", "@smithy/protocol-http": "^5.3.12", "@smithy/types": "^4.13.1", "@smithy/util-stream": "^4.5.21", "tslib": "^2.6.2" } }, "sha512-aJaAX7vHe5i66smoSSID7t4rKY08PbD8EBU7DOloixvhOozfYWdcSYE4l6/tjkZ0vBZhGjheWzB2mh31sLgCMA=="], + + "@aws-sdk/client-secrets-manager/@smithy/util-defaults-mode-browser": ["@smithy/util-defaults-mode-browser@4.3.44", "", { "dependencies": { "@smithy/property-provider": "^4.2.12", "@smithy/smithy-client": "^4.12.8", "@smithy/types": "^4.13.1", "tslib": "^2.6.2" } }, "sha512-eZg6XzaCbVr2S5cAErU5eGBDaOVTuTo1I65i4tQcHENRcZ8rMWhQy1DaIYUSLyZjsfXvmCqZrstSMYyGFocvHA=="], + + "@aws-sdk/client-secrets-manager/@smithy/util-defaults-mode-node": ["@smithy/util-defaults-mode-node@4.2.48", "", { "dependencies": { "@smithy/config-resolver": "^4.4.13", "@smithy/credential-provider-imds": "^4.2.12", "@smithy/node-config-provider": "^4.3.12", "@smithy/property-provider": "^4.2.12", "@smithy/smithy-client": "^4.12.8", "@smithy/types": "^4.13.1", "tslib": "^2.6.2" } }, "sha512-FqOKTlqSaoV3nzO55pMs5NBnZX8EhoI0DGmn9kbYeXWppgHD6dchyuj2HLqp4INJDJbSrj6OFYJkAh/WhSzZPg=="], + + "@aws-sdk/client-secrets-manager/@smithy/util-retry": ["@smithy/util-retry@4.2.13", "", { "dependencies": { "@smithy/service-error-classification": "^4.2.12", "@smithy/types": "^4.13.1", "tslib": "^2.6.2" } }, "sha512-qQQsIvL0MGIbUjeSrg0/VlQ3jGNKyM3/2iU3FPNgy01z+Sp4OvcaxbgIoFOTvB61ZoohtutuOvOcgmhbD0katQ=="], + "@aws-sdk/client-sesv2/@aws-sdk/core": ["@aws-sdk/core@3.973.24", "", { "dependencies": { "@aws-sdk/types": "^3.973.6", "@aws-sdk/xml-builder": "^3.972.15", "@smithy/core": "^3.23.12", "@smithy/node-config-provider": "^4.3.12", "@smithy/property-provider": "^4.2.12", "@smithy/protocol-http": "^5.3.12", "@smithy/signature-v4": "^5.3.12", "@smithy/smithy-client": "^4.12.7", "@smithy/types": "^4.13.1", "@smithy/util-base64": "^4.3.2", "@smithy/util-middleware": "^4.2.12", 
"@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-vvf82RYQu2GidWAuQq+uIzaPz9V0gSCXVqdVzRosgl5rXcspXOpSD3wFreGGW6AYymPr97Z69kjVnLePBxloDw=="], "@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node": ["@aws-sdk/credential-provider-node@3.972.25", "", { "dependencies": { "@aws-sdk/credential-provider-env": "^3.972.22", "@aws-sdk/credential-provider-http": "^3.972.24", "@aws-sdk/credential-provider-ini": "^3.972.24", "@aws-sdk/credential-provider-process": "^3.972.22", "@aws-sdk/credential-provider-sso": "^3.972.24", "@aws-sdk/credential-provider-web-identity": "^3.972.24", "@aws-sdk/types": "^3.973.6", "@smithy/credential-provider-imds": "^4.2.12", "@smithy/property-provider": "^4.2.12", "@smithy/shared-ini-file-loader": "^4.4.7", "@smithy/types": "^4.13.1", "tslib": "^2.6.2" } }, "sha512-m7dR0Dsva2P+VUpL+VkC0WwiDby5pgmWXkRVDB5rlwv0jXJrQJf7YMtCoM8Wjk0H9jPeCYOxOXXcIgp/qp5Alg=="], @@ -4517,6 +4560,24 @@ "@aws-sdk/client-s3/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-web-identity": ["@aws-sdk/credential-provider-web-identity@3.972.24", "", { "dependencies": { "@aws-sdk/core": "^3.973.24", "@aws-sdk/nested-clients": "^3.996.14", "@aws-sdk/types": "^3.973.6", "@smithy/property-provider": "^4.2.12", "@smithy/shared-ini-file-loader": "^4.4.7", "@smithy/types": "^4.13.1", "tslib": "^2.6.2" } }, "sha512-J6H4R1nvr3uBTqD/EeIPAskrBtET4WFfNhpFySr2xW7bVZOXpQfPjrLSIx65jcNjBmLXzWq8QFLdVoGxiGG/SA=="], + "@aws-sdk/client-secrets-manager/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.972.16", "", { "dependencies": { "@smithy/types": "^4.13.1", "fast-xml-parser": "5.5.8", "tslib": "^2.6.2" } }, "sha512-iu2pyvaqmeatIJLURLqx9D+4jKAdTH20ntzB6BFwjyN7V960r4jK32mx0Zf7YbtOYAbmbtQfDNuL60ONinyw7A=="], + + "@aws-sdk/client-secrets-manager/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-env": ["@aws-sdk/credential-provider-env@3.972.24", "", { "dependencies": { "@aws-sdk/core": "^3.973.26", "@aws-sdk/types": "^3.973.6", 
"@smithy/property-provider": "^4.2.12", "@smithy/types": "^4.13.1", "tslib": "^2.6.2" } }, "sha512-FWg8uFmT6vQM7VuzELzwVo5bzExGaKHdubn0StjgrcU5FvuLExUe+k06kn/40uKv59rYzhez8eFNM4yYE/Yb/w=="], + + "@aws-sdk/client-secrets-manager/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-http": ["@aws-sdk/credential-provider-http@3.972.26", "", { "dependencies": { "@aws-sdk/core": "^3.973.26", "@aws-sdk/types": "^3.973.6", "@smithy/fetch-http-handler": "^5.3.15", "@smithy/node-http-handler": "^4.5.1", "@smithy/property-provider": "^4.2.12", "@smithy/protocol-http": "^5.3.12", "@smithy/smithy-client": "^4.12.8", "@smithy/types": "^4.13.1", "@smithy/util-stream": "^4.5.21", "tslib": "^2.6.2" } }, "sha512-CY4ppZ+qHYqcXqBVi//sdHST1QK3KzOEiLtpLsc9W2k2vfZPKExGaQIsOwcyvjpjUEolotitmd3mUNY56IwDEA=="], + + "@aws-sdk/client-secrets-manager/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini": ["@aws-sdk/credential-provider-ini@3.972.28", "", { "dependencies": { "@aws-sdk/core": "^3.973.26", "@aws-sdk/credential-provider-env": "^3.972.24", "@aws-sdk/credential-provider-http": "^3.972.26", "@aws-sdk/credential-provider-login": "^3.972.28", "@aws-sdk/credential-provider-process": "^3.972.24", "@aws-sdk/credential-provider-sso": "^3.972.28", "@aws-sdk/credential-provider-web-identity": "^3.972.28", "@aws-sdk/nested-clients": "^3.996.18", "@aws-sdk/types": "^3.973.6", "@smithy/credential-provider-imds": "^4.2.12", "@smithy/property-provider": "^4.2.12", "@smithy/shared-ini-file-loader": "^4.4.7", "@smithy/types": "^4.13.1", "tslib": "^2.6.2" } }, "sha512-wXYvq3+uQcZV7k+bE4yDXCTBdzWTU9x/nMiKBfzInmv6yYK1veMK0AKvRfRBd72nGWYKcL6AxwiPg9z/pYlgpw=="], + + "@aws-sdk/client-secrets-manager/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-process": ["@aws-sdk/credential-provider-process@3.972.24", "", { "dependencies": { "@aws-sdk/core": "^3.973.26", "@aws-sdk/types": "^3.973.6", "@smithy/property-provider": "^4.2.12", "@smithy/shared-ini-file-loader": 
"^4.4.7", "@smithy/types": "^4.13.1", "tslib": "^2.6.2" } }, "sha512-Q2k/XLrFXhEztPHqj4SLCNID3hEPdlhh1CDLBpNnM+1L8fq7P+yON9/9M1IGN/dA5W45v44ylERfXtDAlmMNmw=="], + + "@aws-sdk/client-secrets-manager/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-sso": ["@aws-sdk/credential-provider-sso@3.972.28", "", { "dependencies": { "@aws-sdk/core": "^3.973.26", "@aws-sdk/nested-clients": "^3.996.18", "@aws-sdk/token-providers": "3.1021.0", "@aws-sdk/types": "^3.973.6", "@smithy/property-provider": "^4.2.12", "@smithy/shared-ini-file-loader": "^4.4.7", "@smithy/types": "^4.13.1", "tslib": "^2.6.2" } }, "sha512-IoUlmKMLEITFn1SiCTjPfR6KrE799FBo5baWyk/5Ppar2yXZoUdaRqZzJzK6TcJxx450M8m8DbpddRVYlp5R/A=="], + + "@aws-sdk/client-secrets-manager/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-web-identity": ["@aws-sdk/credential-provider-web-identity@3.972.28", "", { "dependencies": { "@aws-sdk/core": "^3.973.26", "@aws-sdk/nested-clients": "^3.996.18", "@aws-sdk/types": "^3.973.6", "@smithy/property-provider": "^4.2.12", "@smithy/shared-ini-file-loader": "^4.4.7", "@smithy/types": "^4.13.1", "tslib": "^2.6.2" } }, "sha512-d+6h0SD8GGERzKe27v5rOzNGKOl0D+l0bWJdqrxH8WSQzHzjsQFIAPgIeOTUwBHVsKKwtSxc91K/SWax6XgswQ=="], + + "@aws-sdk/client-secrets-manager/@smithy/core/@smithy/util-stream": ["@smithy/util-stream@4.5.21", "", { "dependencies": { "@smithy/fetch-http-handler": "^5.3.15", "@smithy/node-http-handler": "^4.5.1", "@smithy/types": "^4.13.1", "@smithy/util-base64": "^4.3.2", "@smithy/util-buffer-from": "^4.2.2", "@smithy/util-hex-encoding": "^4.2.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-KzSg+7KKywLnkoKejRtIBXDmwBfjGvg1U1i/etkC7XSWUyFCoLno1IohV2c74IzQqdhX5y3uE44r/8/wuK+A7Q=="], + + "@aws-sdk/client-secrets-manager/@smithy/smithy-client/@smithy/util-stream": ["@smithy/util-stream@4.5.21", "", { "dependencies": { "@smithy/fetch-http-handler": "^5.3.15", "@smithy/node-http-handler": "^4.5.1", "@smithy/types": "^4.13.1", 
"@smithy/util-base64": "^4.3.2", "@smithy/util-buffer-from": "^4.2.2", "@smithy/util-hex-encoding": "^4.2.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-KzSg+7KKywLnkoKejRtIBXDmwBfjGvg1U1i/etkC7XSWUyFCoLno1IohV2c74IzQqdhX5y3uE44r/8/wuK+A7Q=="], + "@aws-sdk/client-sesv2/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.972.15", "", { "dependencies": { "@smithy/types": "^4.13.1", "fast-xml-parser": "5.5.8", "tslib": "^2.6.2" } }, "sha512-PxMRlCFNiQnke9YR29vjFQwz4jq+6Q04rOVFeTDR2K7Qpv9h9FOWOxG+zJjageimYbWqE3bTuLjmryWHAWbvaA=="], "@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-env": ["@aws-sdk/credential-provider-env@3.972.22", "", { "dependencies": { "@aws-sdk/core": "^3.973.24", "@aws-sdk/types": "^3.973.6", "@smithy/property-provider": "^4.2.12", "@smithy/types": "^4.13.1", "tslib": "^2.6.2" } }, "sha512-cXp0VTDWT76p3hyK5D51yIKEfpf6/zsUvMfaB8CkyqadJxMQ8SbEeVroregmDlZbtG31wkj9ei0WnftmieggLg=="], @@ -5067,6 +5128,20 @@ "@aws-sdk/client-s3/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-web-identity/@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.996.14", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.973.24", "@aws-sdk/middleware-host-header": "^3.972.8", "@aws-sdk/middleware-logger": "^3.972.8", "@aws-sdk/middleware-recursion-detection": "^3.972.8", "@aws-sdk/middleware-user-agent": "^3.972.25", "@aws-sdk/region-config-resolver": "^3.972.9", "@aws-sdk/types": "^3.973.6", "@aws-sdk/util-endpoints": "^3.996.5", "@aws-sdk/util-user-agent-browser": "^3.972.8", "@aws-sdk/util-user-agent-node": "^3.973.11", "@smithy/config-resolver": "^4.4.13", "@smithy/core": "^3.23.12", "@smithy/fetch-http-handler": "^5.3.15", "@smithy/hash-node": "^4.2.12", "@smithy/invalid-dependency": "^4.2.12", "@smithy/middleware-content-length": "^4.2.12", "@smithy/middleware-endpoint": "^4.4.27", "@smithy/middleware-retry": "^4.4.44", 
"@smithy/middleware-serde": "^4.2.15", "@smithy/middleware-stack": "^4.2.12", "@smithy/node-config-provider": "^4.3.12", "@smithy/node-http-handler": "^4.5.0", "@smithy/protocol-http": "^5.3.12", "@smithy/smithy-client": "^4.12.7", "@smithy/types": "^4.13.1", "@smithy/url-parser": "^4.2.12", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.43", "@smithy/util-defaults-mode-node": "^4.2.47", "@smithy/util-endpoints": "^3.3.3", "@smithy/util-middleware": "^4.2.12", "@smithy/util-retry": "^4.2.12", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-fSESKvh1VbfjtV3QMnRkCPZWkUbQof6T/DOpiLp33yP2wA+rbwwnZeG3XT3Ekljgw2I8X4XaQPnw+zSR8yxJ5Q=="], + "@aws-sdk/client-secrets-manager/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.5.8", "", { "dependencies": { "fast-xml-builder": "^1.1.4", "path-expression-matcher": "^1.2.0", "strnum": "^2.2.0" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-Z7Fh2nVQSb2d+poDViM063ix2ZGt9jmY1nWhPfHBOK2Hgnb/OW3P4Et3P/81SEej0J7QbWtJqxO05h8QYfK7LQ=="], + + "@aws-sdk/client-secrets-manager/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-http/@smithy/util-stream": ["@smithy/util-stream@4.5.21", "", { "dependencies": { "@smithy/fetch-http-handler": "^5.3.15", "@smithy/node-http-handler": "^4.5.1", "@smithy/types": "^4.13.1", "@smithy/util-base64": "^4.3.2", "@smithy/util-buffer-from": "^4.2.2", "@smithy/util-hex-encoding": "^4.2.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-KzSg+7KKywLnkoKejRtIBXDmwBfjGvg1U1i/etkC7XSWUyFCoLno1IohV2c74IzQqdhX5y3uE44r/8/wuK+A7Q=="], + + "@aws-sdk/client-secrets-manager/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini/@aws-sdk/credential-provider-login": ["@aws-sdk/credential-provider-login@3.972.28", "", { "dependencies": { "@aws-sdk/core": "^3.973.26", "@aws-sdk/nested-clients": "^3.996.18", "@aws-sdk/types": 
"^3.973.6", "@smithy/property-provider": "^4.2.12", "@smithy/protocol-http": "^5.3.12", "@smithy/shared-ini-file-loader": "^4.4.7", "@smithy/types": "^4.13.1", "tslib": "^2.6.2" } }, "sha512-ZSTfO6jqUTCysbdBPtEX5OUR//3rbD0lN7jO3sQeS2Gjr/Y+DT6SbIJ0oT2cemNw3UzKu97sNONd1CwNMthuZQ=="], + + "@aws-sdk/client-secrets-manager/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini/@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.996.18", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.973.26", "@aws-sdk/middleware-host-header": "^3.972.8", "@aws-sdk/middleware-logger": "^3.972.8", "@aws-sdk/middleware-recursion-detection": "^3.972.9", "@aws-sdk/middleware-user-agent": "^3.972.28", "@aws-sdk/region-config-resolver": "^3.972.10", "@aws-sdk/types": "^3.973.6", "@aws-sdk/util-endpoints": "^3.996.5", "@aws-sdk/util-user-agent-browser": "^3.972.8", "@aws-sdk/util-user-agent-node": "^3.973.14", "@smithy/config-resolver": "^4.4.13", "@smithy/core": "^3.23.13", "@smithy/fetch-http-handler": "^5.3.15", "@smithy/hash-node": "^4.2.12", "@smithy/invalid-dependency": "^4.2.12", "@smithy/middleware-content-length": "^4.2.12", "@smithy/middleware-endpoint": "^4.4.28", "@smithy/middleware-retry": "^4.4.46", "@smithy/middleware-serde": "^4.2.16", "@smithy/middleware-stack": "^4.2.12", "@smithy/node-config-provider": "^4.3.12", "@smithy/node-http-handler": "^4.5.1", "@smithy/protocol-http": "^5.3.12", "@smithy/smithy-client": "^4.12.8", "@smithy/types": "^4.13.1", "@smithy/url-parser": "^4.2.12", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.44", "@smithy/util-defaults-mode-node": "^4.2.48", "@smithy/util-endpoints": "^3.3.3", "@smithy/util-middleware": "^4.2.12", "@smithy/util-retry": "^4.2.13", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, 
"sha512-c7ZSIXrESxHKx2Mcopgd8AlzZgoXMr20fkx5ViPWPOLBvmyhw9VwJx/Govg8Ef/IhEon5R9l53Z8fdYSEmp6VA=="], + + "@aws-sdk/client-secrets-manager/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-sso/@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.996.18", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.973.26", "@aws-sdk/middleware-host-header": "^3.972.8", "@aws-sdk/middleware-logger": "^3.972.8", "@aws-sdk/middleware-recursion-detection": "^3.972.9", "@aws-sdk/middleware-user-agent": "^3.972.28", "@aws-sdk/region-config-resolver": "^3.972.10", "@aws-sdk/types": "^3.973.6", "@aws-sdk/util-endpoints": "^3.996.5", "@aws-sdk/util-user-agent-browser": "^3.972.8", "@aws-sdk/util-user-agent-node": "^3.973.14", "@smithy/config-resolver": "^4.4.13", "@smithy/core": "^3.23.13", "@smithy/fetch-http-handler": "^5.3.15", "@smithy/hash-node": "^4.2.12", "@smithy/invalid-dependency": "^4.2.12", "@smithy/middleware-content-length": "^4.2.12", "@smithy/middleware-endpoint": "^4.4.28", "@smithy/middleware-retry": "^4.4.46", "@smithy/middleware-serde": "^4.2.16", "@smithy/middleware-stack": "^4.2.12", "@smithy/node-config-provider": "^4.3.12", "@smithy/node-http-handler": "^4.5.1", "@smithy/protocol-http": "^5.3.12", "@smithy/smithy-client": "^4.12.8", "@smithy/types": "^4.13.1", "@smithy/url-parser": "^4.2.12", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": "^4.3.44", "@smithy/util-defaults-mode-node": "^4.2.48", "@smithy/util-endpoints": "^3.3.3", "@smithy/util-middleware": "^4.2.12", "@smithy/util-retry": "^4.2.13", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-c7ZSIXrESxHKx2Mcopgd8AlzZgoXMr20fkx5ViPWPOLBvmyhw9VwJx/Govg8Ef/IhEon5R9l53Z8fdYSEmp6VA=="], + + 
"@aws-sdk/client-secrets-manager/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-sso/@aws-sdk/token-providers": ["@aws-sdk/token-providers@3.1021.0", "", { "dependencies": { "@aws-sdk/core": "^3.973.26", "@aws-sdk/nested-clients": "^3.996.18", "@aws-sdk/types": "^3.973.6", "@smithy/property-provider": "^4.2.12", "@smithy/shared-ini-file-loader": "^4.4.7", "@smithy/types": "^4.13.1", "tslib": "^2.6.2" } }, "sha512-TKY6h9spUk3OLs5v1oAgW9mAeBE3LAGNBwJokLy96wwmd4W2v/tYlXseProyed9ValDj2u1jK/4Rg1T+1NXyJA=="], + + "@aws-sdk/client-secrets-manager/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-web-identity/@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.996.18", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.973.26", "@aws-sdk/middleware-host-header": "^3.972.8", "@aws-sdk/middleware-logger": "^3.972.8", "@aws-sdk/middleware-recursion-detection": "^3.972.9", "@aws-sdk/middleware-user-agent": "^3.972.28", "@aws-sdk/region-config-resolver": "^3.972.10", "@aws-sdk/types": "^3.973.6", "@aws-sdk/util-endpoints": "^3.996.5", "@aws-sdk/util-user-agent-browser": "^3.972.8", "@aws-sdk/util-user-agent-node": "^3.973.14", "@smithy/config-resolver": "^4.4.13", "@smithy/core": "^3.23.13", "@smithy/fetch-http-handler": "^5.3.15", "@smithy/hash-node": "^4.2.12", "@smithy/invalid-dependency": "^4.2.12", "@smithy/middleware-content-length": "^4.2.12", "@smithy/middleware-endpoint": "^4.4.28", "@smithy/middleware-retry": "^4.4.46", "@smithy/middleware-serde": "^4.2.16", "@smithy/middleware-stack": "^4.2.12", "@smithy/node-config-provider": "^4.3.12", "@smithy/node-http-handler": "^4.5.1", "@smithy/protocol-http": "^5.3.12", "@smithy/smithy-client": "^4.12.8", "@smithy/types": "^4.13.1", "@smithy/url-parser": "^4.2.12", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", "@smithy/util-defaults-mode-browser": 
"^4.3.44", "@smithy/util-defaults-mode-node": "^4.2.48", "@smithy/util-endpoints": "^3.3.3", "@smithy/util-middleware": "^4.2.12", "@smithy/util-retry": "^4.2.13", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-c7ZSIXrESxHKx2Mcopgd8AlzZgoXMr20fkx5ViPWPOLBvmyhw9VwJx/Govg8Ef/IhEon5R9l53Z8fdYSEmp6VA=="], + "@aws-sdk/client-sesv2/@aws-sdk/core/@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.5.8", "", { "dependencies": { "fast-xml-builder": "^1.1.4", "path-expression-matcher": "^1.2.0", "strnum": "^2.2.0" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-Z7Fh2nVQSb2d+poDViM063ix2ZGt9jmY1nWhPfHBOK2Hgnb/OW3P4Et3P/81SEej0J7QbWtJqxO05h8QYfK7LQ=="], "@aws-sdk/client-sesv2/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-ini/@aws-sdk/credential-provider-login": ["@aws-sdk/credential-provider-login@3.972.24", "", { "dependencies": { "@aws-sdk/core": "^3.973.24", "@aws-sdk/nested-clients": "^3.996.14", "@aws-sdk/types": "^3.973.6", "@smithy/property-provider": "^4.2.12", "@smithy/protocol-http": "^5.3.12", "@smithy/shared-ini-file-loader": "^4.4.7", "@smithy/types": "^4.13.1", "tslib": "^2.6.2" } }, "sha512-sIk8oa6AzDoUhxsR11svZESqvzGuXesw62Rl2oW6wguZx8i9cdGCvkFg+h5K7iucUZP8wyWibUbJMc+J66cu5g=="],