diff --git a/.claude/commands/add-block.md b/.claude/commands/add-block.md
index d31edcf019..9c229c2271 100644
--- a/.claude/commands/add-block.md
+++ b/.claude/commands/add-block.md
@@ -454,6 +454,8 @@ Enables AI-assisted field generation.
 ## Tools Configuration
 
+**Important:** `tools.config.tool` runs during serialization before variable resolution. Put `Number()` and other type coercions in `tools.config.params` instead, which runs at execution time after variables are resolved.
+
 **Preferred:** Use tool names directly as dropdown option IDs to avoid switch cases:
 
 ```typescript
 // Dropdown options use tool IDs directly
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 73e5e852fb..8f369210ec 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -144,6 +144,7 @@ jobs:
           tags: ${{ steps.meta.outputs.tags }}
           provenance: false
           sbom: false
+          no-cache: true
 
   # Build ARM64 images for GHCR (main branch only, runs in parallel)
   build-ghcr-arm64:
@@ -204,6 +205,7 @@ jobs:
           tags: ${{ steps.meta.outputs.tags }}
           provenance: false
           sbom: false
+          no-cache: true
 
   # Create GHCR multi-arch manifests (only for main, after both builds)
   create-ghcr-manifests:
diff --git a/.github/workflows/images.yml b/.github/workflows/images.yml
index d708d4c4de..e3136510eb 100644
--- a/.github/workflows/images.yml
+++ b/.github/workflows/images.yml
@@ -97,6 +97,7 @@ jobs:
           tags: ${{ steps.meta.outputs.tags }}
           provenance: false
           sbom: false
+          no-cache: true
 
   build-ghcr-arm64:
     name: Build ARM64 (GHCR Only)
@@ -143,6 +144,7 @@ jobs:
           tags: ${{ steps.meta.outputs.tags }}
           provenance: false
           sbom: false
+          no-cache: true
 
   create-ghcr-manifests:
     name: Create GHCR Manifests
diff --git a/CLAUDE.md b/CLAUDE.md
index 71e8ef716c..edc351d71d 100644
--- a/CLAUDE.md
+++ b/CLAUDE.md
@@ -238,7 +238,7 @@ export const ServiceBlock: BlockConfig = {
   bgColor: '#hexcolor',
   icon: ServiceIcon,
   subBlocks: [ /* see SubBlock Properties */ ],
-  tools: { access: ['service_action'], config: { tool: (p) => `service_${p.operation}` } },
+  tools: { access: ['service_action'], config: { tool: (p) => `service_${p.operation}`, params: (p) => ({ /* type coercions here */ }) } },
   inputs: { /* ... */ },
   outputs: { /* ... */ },
 }
@@ -246,6 +246,8 @@ export const ServiceBlock: BlockConfig = {
 
 Register in `blocks/registry.ts` (alphabetically).
 
+**Important:** `tools.config.tool` runs during serialization (before variable resolution). Never apply `Number()` or other type coercions there — dynamic variable references will be destroyed. Use `tools.config.params` for type coercions (it runs during execution, after variables are resolved).
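+A minimal sketch of the pattern (block and field names here are illustrative, not from the codebase):
+
+```typescript
+// tools.config.tool runs at serialization time: param values may still
+// contain unresolved references, so keep it a pure string mapping.
+// tools.config.params runs at execution time: variables are resolved,
+// so type coercion is safe here.
+tools: {
+  access: ['service_read', 'service_write'],
+  config: {
+    tool: (p) => `service_${p.operation}`,
+    params: (p) => ({
+      ...p,
+      // Hypothetical numeric field: coerce only after resolution.
+      limit: p.limit !== undefined ? Number(p.limit) : undefined,
+    }),
+  },
+},
+```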
+ **SubBlock Properties:** ```typescript { diff --git a/apps/docs/components/icons.tsx b/apps/docs/components/icons.tsx index c31380d81c..16c248b74d 100644 --- a/apps/docs/components/icons.tsx +++ b/apps/docs/components/icons.tsx @@ -1157,6 +1157,17 @@ export function AirweaveIcon(props: SVGProps) { ) } +export function AlgoliaIcon(props: SVGProps) { + return ( + + + + ) +} + export function GoogleBooksIcon(props: SVGProps) { return ( @@ -4407,6 +4418,161 @@ export function DatadogIcon(props: SVGProps) { ) } +export function MicrosoftDataverseIcon(props: SVGProps) { + const id = useId() + const clip0 = `dataverse_clip0_${id}` + const clip1 = `dataverse_clip1_${id}` + const clip2 = `dataverse_clip2_${id}` + const paint0 = `dataverse_paint0_${id}` + const paint1 = `dataverse_paint1_${id}` + const paint2 = `dataverse_paint2_${id}` + const paint3 = `dataverse_paint3_${id}` + const paint4 = `dataverse_paint4_${id}` + const paint5 = `dataverse_paint5_${id}` + const paint6 = `dataverse_paint6_${id}` + return ( + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ) +} + export function KalshiIcon(props: SVGProps) { return ( @@ -4809,6 +4975,26 @@ export function BedrockIcon(props: SVGProps) { ) } +export function TableIcon(props: SVGProps) { + return ( + + + + + + + + ) +} export function ReductoIcon(props: SVGProps) { return ( ) { ) } + +export function CloudflareIcon(props: SVGProps) { + return ( + + + + + ) +} + +export function UpstashIcon(props: SVGProps) { + return ( + + + + + + + + ) +} + +export function RevenueCatIcon(props: SVGProps) { + return ( + + + + + ) +} + +export function RedisIcon(props: SVGProps) { + return ( + + + + ) +} diff --git a/apps/docs/components/ui/icon-mapping.ts b/apps/docs/components/ui/icon-mapping.ts index 171d8461be..061586caee 100644 --- a/apps/docs/components/ui/icon-mapping.ts +++ b/apps/docs/components/ui/icon-mapping.ts @@ -8,6 +8,7 @@ import { AhrefsIcon, AirtableIcon, AirweaveIcon, + AlgoliaIcon, ApifyIcon, ApolloIcon, ArxivIcon, @@ -19,6 +20,7 @@ import { CirclebackIcon, ClayIcon, ClerkIcon, + CloudflareIcon, ConfluenceIcon, CursorIcon, DatadogIcon, @@ -71,6 +73,7 @@ import { MailgunIcon, MailServerIcon, Mem0Icon, + MicrosoftDataverseIcon, MicrosoftExcelIcon, MicrosoftOneDriveIcon, MicrosoftPlannerIcon, @@ -96,8 +99,10 @@ import { QdrantIcon, RDSIcon, RedditIcon, + RedisIcon, ReductoIcon, ResendIcon, + RevenueCatIcon, S3Icon, SalesforceIcon, SearchIcon, @@ -125,6 +130,7 @@ import { TTSIcon, TwilioIcon, TypeformIcon, + UpstashIcon, VercelIcon, VideoIcon, WealthboxIcon, @@ -146,6 +152,7 @@ export const blockTypeToIconMap: Record = { ahrefs: AhrefsIcon, airtable: AirtableIcon, airweave: AirweaveIcon, + algolia: AlgoliaIcon, apify: ApifyIcon, apollo: ApolloIcon, arxiv: ArxivIcon, @@ -156,6 +163,7 @@ export const blockTypeToIconMap: Record = { circleback: CirclebackIcon, clay: ClayIcon, clerk: ClerkIcon, + cloudflare: CloudflareIcon, confluence_v2: ConfluenceIcon, cursor_v2: CursorIcon, datadog: DatadogIcon, @@ -209,6 +217,7 @@ export const blockTypeToIconMap: Record = { mailgun: MailgunIcon, mem0: Mem0Icon, memory: BrainIcon, + microsoft_dataverse: MicrosoftDataverseIcon, microsoft_excel_v2: MicrosoftExcelIcon, microsoft_planner: MicrosoftPlannerIcon, microsoft_teams: MicrosoftTeamsIcon, @@ -232,8 +241,10 @@ export const blockTypeToIconMap: Record = { qdrant: QdrantIcon, rds: RDSIcon, reddit: RedditIcon, + redis: RedisIcon, reducto_v2: ReductoIcon, resend: ResendIcon, + revenuecat: 
RevenueCatIcon, s3: S3Icon, salesforce: SalesforceIcon, search: SearchIcon, @@ -263,6 +274,7 @@ export const blockTypeToIconMap: Record = { twilio_sms: TwilioIcon, twilio_voice: TwilioIcon, typeform: TypeformIcon, + upstash: UpstashIcon, vercel: VercelIcon, video_generator_v2: VideoIcon, vision_v2: EyeIcon, diff --git a/apps/docs/content/docs/en/tools/algolia.mdx b/apps/docs/content/docs/en/tools/algolia.mdx new file mode 100644 index 0000000000..681b5be577 --- /dev/null +++ b/apps/docs/content/docs/en/tools/algolia.mdx @@ -0,0 +1,404 @@ +--- +title: Algolia +description: Search and manage Algolia indices +--- + +import { BlockInfoCard } from "@/components/ui/block-info-card" + + + +{/* MANUAL-CONTENT-START:intro */} +[Algolia](https://www.algolia.com/) is a powerful hosted search platform that enables developers and teams to deliver fast, relevant search experiences in their apps and websites. Algolia provides full-text, faceted, and filtered search as well as analytics and advanced ranking capabilities. + +With Algolia, you can: + +- **Deliver lightning-fast search**: Provide instant search results as users type, with typo tolerance and synonyms +- **Manage and update records**: Easily add, update, or delete objects/records in your indices +- **Perform advanced filtering**: Use filters, facets, and custom ranking to refine and organize search results +- **Configure index settings**: Adjust relevance, ranking, attributes for search, and more to optimize user experience +- **Scale confidently**: Algolia handles massive traffic and data volumes with globally distributed infrastructure +- **Gain insights**: Track analytics, search patterns, and user engagement + +In Sim, the Algolia integration allows your agents to search, manage, and configure Algolia indices directly within your workflows. Use Algolia to power dynamic data exploration, automate record updates, run batch operations, and more—all from a single tool in your workspace. +{/* MANUAL-CONTENT-END */} + + +## Usage Instructions + +Integrate Algolia into your workflow. Search indices, manage records (add, update, delete, browse), configure index settings, and perform batch operations. + + + +## Tools + +### `algolia_search` + +Search an Algolia index + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `applicationId` | string | Yes | Algolia Application ID | +| `apiKey` | string | Yes | Algolia API Key | +| `indexName` | string | Yes | Name of the Algolia index to search | +| `query` | string | Yes | Search query text | +| `hitsPerPage` | number | No | Number of hits per page \(default: 20\) | +| `page` | number | No | Page number to retrieve \(default: 0\) | +| `filters` | string | No | Filter string \(e.g., "category:electronics AND price < 100"\) | +| `attributesToRetrieve` | string | No | Comma-separated list of attributes to retrieve | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `hits` | array | Array of matching records | +| ↳ `objectID` | string | Unique identifier of the record | +| ↳ `_highlightResult` | object | Highlighted attributes matching the query. Each attribute has value, matchLevel \(none, partial, full\), and matchedWords | +| ↳ `_snippetResult` | object | Snippeted attributes matching the query. Each attribute has value and matchLevel | +| ↳ `_rankingInfo` | object | Ranking information for the hit. 
Only present when getRankingInfo is enabled | +| ↳ `nbTypos` | number | Number of typos in the query match | +| ↳ `firstMatchedWord` | number | Position of the first matched word | +| ↳ `geoDistance` | number | Distance in meters for geo-search results | +| ↳ `nbExactWords` | number | Number of exactly matched words | +| ↳ `userScore` | number | Custom ranking score | +| ↳ `words` | number | Number of matched words | +| `nbHits` | number | Total number of matching hits | +| `page` | number | Current page number \(zero-based\) | +| `nbPages` | number | Total number of pages available | +| `hitsPerPage` | number | Number of hits per page \(1-1000, default 20\) | +| `processingTimeMS` | number | Server-side processing time in milliseconds | +| `query` | string | The search query that was executed | +| `parsedQuery` | string | The query string after normalization and stop word removal | +| `facets` | object | Facet counts keyed by facet name, each containing value-count pairs | +| `facets_stats` | object | Statistics \(min, max, avg, sum\) for numeric facets | +| `exhaustive` | object | Exhaustiveness flags for facetsCount, facetValues, nbHits, rulesMatch, and typo | + +### `algolia_add_record` + +Add or replace a record in an Algolia index + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `applicationId` | string | Yes | Algolia Application ID | +| `apiKey` | string | Yes | Algolia Admin API Key | +| `indexName` | string | Yes | Name of the Algolia index | +| `objectID` | string | No | Object ID for the record \(auto-generated if not provided\) | +| `record` | json | Yes | JSON object representing the record to add | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `taskID` | number | Algolia task ID for tracking the indexing operation | +| `objectID` | string | The object ID of the added or replaced record | +| `createdAt` | string | Timestamp when the record was created \(only present when objectID is auto-generated\) | +| `updatedAt` | string | Timestamp when the record was updated \(only present when replacing an existing record\) | + +### `algolia_get_record` + +Get a record by objectID from an Algolia index + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `applicationId` | string | Yes | Algolia Application ID | +| `apiKey` | string | Yes | Algolia API Key | +| `indexName` | string | Yes | Name of the Algolia index | +| `objectID` | string | Yes | The objectID of the record to retrieve | +| `attributesToRetrieve` | string | No | Comma-separated list of attributes to retrieve | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `objectID` | string | The objectID of the retrieved record | +| `record` | object | The record data \(all attributes\) | + +### `algolia_get_records` + +Retrieve multiple records by objectID from one or more Algolia indices + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `applicationId` | string | Yes | Algolia Application ID | +| `apiKey` | string | Yes | Algolia API Key | +| `indexName` | string | Yes | Default index name for all requests | +| `requests` | json | Yes | Array of objects specifying records to retrieve. Each must have "objectID" and optionally "indexName" and "attributesToRetrieve". 
| + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `results` | array | Array of retrieved records \(null entries for records not found\) | +| ↳ `objectID` | string | Unique identifier of the record | + +### `algolia_partial_update_record` + +Partially update a record in an Algolia index without replacing it entirely + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `applicationId` | string | Yes | Algolia Application ID | +| `apiKey` | string | Yes | Algolia Admin API Key | +| `indexName` | string | Yes | Name of the Algolia index | +| `objectID` | string | Yes | The objectID of the record to update | +| `attributes` | json | Yes | JSON object with attributes to update. Supports built-in operations like \{"stock": \{"_operation": "Decrement", "value": 1\}\} | +| `createIfNotExists` | boolean | No | Whether to create the record if it does not exist \(default: true\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `taskID` | number | Algolia task ID for tracking the update operation | +| `objectID` | string | The objectID of the updated record | +| `updatedAt` | string | Timestamp when the record was updated | + +### `algolia_delete_record` + +Delete a record by objectID from an Algolia index + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `applicationId` | string | Yes | Algolia Application ID | +| `apiKey` | string | Yes | Algolia Admin API Key | +| `indexName` | string | Yes | Name of the Algolia index | +| `objectID` | string | Yes | The objectID of the record to delete | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `taskID` | number | Algolia task ID for tracking the deletion | +| `deletedAt` | string | Timestamp when the record was deleted | + +### `algolia_browse_records` + +Browse and iterate over all records in an Algolia index using cursor pagination + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `applicationId` | string | Yes | Algolia Application ID | +| `apiKey` | string | Yes | Algolia API Key \(must have browse ACL\) | +| `indexName` | string | Yes | Name of the Algolia index to browse | +| `query` | string | No | Search query to filter browsed records | +| `filters` | string | No | Filter string to narrow down results | +| `attributesToRetrieve` | string | No | Comma-separated list of attributes to retrieve | +| `hitsPerPage` | number | No | Number of hits per page \(default: 1000, max: 1000\) | +| `cursor` | string | No | Cursor from a previous browse response for pagination | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `hits` | array | Array of records from the index \(up to 1000 per request\) | +| ↳ `objectID` | string | Unique identifier of the record | +| `cursor` | string | Opaque cursor string for retrieving the next page of results. Absent when no more results exist. 
| +| `nbHits` | number | Total number of records matching the browse criteria | +| `page` | number | Current page number \(zero-based\) | +| `nbPages` | number | Total number of pages available | +| `hitsPerPage` | number | Number of hits per page \(1-1000, default 1000 for browse\) | +| `processingTimeMS` | number | Server-side processing time in milliseconds | + +### `algolia_batch_operations` + +Perform batch add, update, partial update, or delete operations on records in an Algolia index + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `applicationId` | string | Yes | Algolia Application ID | +| `apiKey` | string | Yes | Algolia Admin API Key | +| `indexName` | string | Yes | Name of the Algolia index | +| `requests` | json | Yes | Array of batch operations. Each item has "action" \(addObject, updateObject, partialUpdateObject, partialUpdateObjectNoCreate, deleteObject\) and "body" \(the record data, must include objectID for update/delete\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `taskID` | number | Algolia task ID for tracking the batch operation | +| `objectIDs` | array | Array of object IDs affected by the batch operation | + +### `algolia_list_indices` + +List all indices in an Algolia application + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `applicationId` | string | Yes | Algolia Application ID | +| `apiKey` | string | Yes | Algolia API Key | +| `page` | number | No | Page number for paginating indices \(default: not paginated\) | +| `hitsPerPage` | number | No | Number of indices per page \(default: 100\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `indices` | array | List of indices in the application | +| ↳ `name` | string | Name of the index | +| ↳ `entries` | number | Number of records in the index | +| ↳ `dataSize` | number | Size of the index data in bytes | +| ↳ `fileSize` | number | Size of the index files in bytes | +| ↳ `lastBuildTimeS` | number | Last build duration in seconds | +| ↳ `numberOfPendingTasks` | number | Number of pending indexing tasks | +| ↳ `pendingTask` | boolean | Whether the index has pending tasks | +| ↳ `createdAt` | string | Timestamp when the index was created | +| ↳ `updatedAt` | string | Timestamp when the index was last updated | +| ↳ `primary` | string | Name of the primary index \(if this is a replica\) | +| ↳ `replicas` | array | List of replica index names | +| ↳ `virtual` | boolean | Whether the index is a virtual replica | +| `nbPages` | number | Total number of pages of indices | + +### `algolia_get_settings` + +Retrieve the settings of an Algolia index + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `applicationId` | string | Yes | Algolia Application ID | +| `apiKey` | string | Yes | Algolia API Key | +| `indexName` | string | Yes | Name of the Algolia index | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `searchableAttributes` | array | List of searchable attributes | +| `attributesForFaceting` | array | Attributes used for faceting | +| `ranking` | array | Ranking criteria | +| `customRanking` | array | Custom ranking criteria | +| `replicas` | array | List of replica index names | +| `hitsPerPage` | number | Default number of hits per page | +| `maxValuesPerFacet` | number | Maximum number 
of facet values returned | +| `highlightPreTag` | string | HTML tag inserted before highlighted parts | +| `highlightPostTag` | string | HTML tag inserted after highlighted parts | +| `paginationLimitedTo` | number | Maximum number of hits accessible via pagination | + +### `algolia_update_settings` + +Update the settings of an Algolia index + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `applicationId` | string | Yes | Algolia Application ID | +| `apiKey` | string | Yes | Algolia Admin API Key \(must have editSettings ACL\) | +| `indexName` | string | Yes | Name of the Algolia index | +| `settings` | json | Yes | JSON object with settings to update \(e.g., \{"searchableAttributes": \["name", "description"\], "customRanking": \["desc\(popularity\)"\]\}\) | +| `forwardToReplicas` | boolean | No | Whether to apply changes to replica indices \(default: false\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `taskID` | number | Algolia task ID for tracking the settings update | +| `updatedAt` | string | Timestamp when the settings were updated | + +### `algolia_delete_index` + +Delete an entire Algolia index and all its records + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `applicationId` | string | Yes | Algolia Application ID | +| `apiKey` | string | Yes | Algolia Admin API Key \(must have deleteIndex ACL\) | +| `indexName` | string | Yes | Name of the Algolia index to delete | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `taskID` | number | Algolia task ID for tracking the index deletion | +| `deletedAt` | string | Timestamp when the index was deleted | + +### `algolia_copy_move_index` + +Copy or move an Algolia index to a new destination + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `applicationId` | string | Yes | Algolia Application ID | +| `apiKey` | string | Yes | Algolia Admin API Key | +| `indexName` | string | Yes | Name of the source index | +| `operation` | string | Yes | Operation to perform: "copy" or "move" | +| `destination` | string | Yes | Name of the destination index | +| `scope` | json | No | Array of scopes to copy \(only for "copy" operation\): \["settings", "synonyms", "rules"\]. Omit to copy everything including records. 
| + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `taskID` | number | Algolia task ID for tracking the copy/move operation | +| `updatedAt` | string | Timestamp when the operation was performed | + +### `algolia_clear_records` + +Clear all records from an Algolia index while keeping settings, synonyms, and rules + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `applicationId` | string | Yes | Algolia Application ID | +| `apiKey` | string | Yes | Algolia Admin API Key \(must have deleteIndex ACL\) | +| `indexName` | string | Yes | Name of the Algolia index to clear | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `taskID` | number | Algolia task ID for tracking the clear operation | +| `updatedAt` | string | Timestamp when the records were cleared | + +### `algolia_delete_by_filter` + +Delete all records matching a filter from an Algolia index + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `applicationId` | string | Yes | Algolia Application ID | +| `apiKey` | string | Yes | Algolia Admin API Key \(must have deleteIndex ACL\) | +| `indexName` | string | Yes | Name of the Algolia index | +| `filters` | string | No | Filter expression to match records for deletion \(e.g., "category:outdated"\) | +| `facetFilters` | json | No | Array of facet filters \(e.g., \["brand:Acme"\]\) | +| `numericFilters` | json | No | Array of numeric filters \(e.g., \["price > 100"\]\) | +| `tagFilters` | json | No | Array of tag filters using the _tags attribute \(e.g., \["published"\]\) | +| `aroundLatLng` | string | No | Coordinates for geo-search filter \(e.g., "40.71,-74.01"\) | +| `aroundRadius` | number | No | Maximum radius in meters for geo-search, or "all" for unlimited | +| `insideBoundingBox` | json | No | Bounding box coordinates as \[\[lat1, lng1, lat2, lng2\]\] for geo-search filter | +| `insidePolygon` | json | No | Polygon coordinates as \[\[lat1, lng1, lat2, lng2, lat3, lng3, ...\]\] for geo-search filter | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `taskID` | number | Algolia task ID for tracking the delete-by-filter operation | +| `updatedAt` | string | Timestamp when the operation was performed | + + diff --git a/apps/docs/content/docs/en/tools/cloudflare.mdx b/apps/docs/content/docs/en/tools/cloudflare.mdx new file mode 100644 index 0000000000..67c629e071 --- /dev/null +++ b/apps/docs/content/docs/en/tools/cloudflare.mdx @@ -0,0 +1,569 @@ +--- +title: Cloudflare +description: Manage DNS, domains, certificates, and cache +--- + +import { BlockInfoCard } from "@/components/ui/block-info-card" + + + +{/* MANUAL-CONTENT-START:intro */} +[Cloudflare](https://cloudflare.com/) is a global cloud platform that provides content delivery, domain management, cybersecurity, and performance services for websites and applications. + +In Sim, the Cloudflare integration empowers your agents to automate the management of DNS records, SSL/TLS certificates, domains (zones), cache, zone settings, and more through easy-to-use API tools. Agents can securely list and edit domains, update DNS records, monitor analytics, and manage security and performance—all as part of your automated workflows. 
+ +With Cloudflare, you can: + +- **Manage DNS and Domains**: List all your domains (zones), view zone details, and fully control DNS records from your automated agent workflows. +- **Handle SSL/TLS Certificates and Settings**: Issue, renew, or list certificates and adjust security and performance settings for your sites. +- **Purge Cache and Analyze Traffic**: Instantly purge edge cache and review real-time DNS analytics directly within your Sim agent processes. +- **Automate Security and Operations**: Use agents to programmatically manage zones, update settings, and streamline repetitive Cloudflare tasks. + +This integration enables streamlined, secure management of your site's infrastructure from within Sim. Your agents can integrate Cloudflare operations directly into processes—keeping DNS records up-to-date, responding to security events, improving site performance, and automating large-scale site and account administration. +{/* MANUAL-CONTENT-END */} + + +## Usage Instructions + +Integrate Cloudflare into the workflow. Manage zones (domains), DNS records, SSL/TLS certificates, zone settings, DNS analytics, and cache purging via the Cloudflare API. + + + +## Tools + +### `cloudflare_list_zones` + +Lists all zones (domains) in the Cloudflare account. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `name` | string | No | Filter zones by domain name \(e.g., "example.com"\) | +| `status` | string | No | Filter by zone status: "initializing", "pending", "active", or "moved" | +| `page` | number | No | Page number for pagination \(default: 1\) | +| `per_page` | number | No | Number of zones per page \(default: 20, max: 50\) | +| `accountId` | string | No | Filter zones by account ID | +| `order` | string | No | Sort field \(name, status, account.id, account.name\) | +| `direction` | string | No | Sort direction \(asc, desc\) | +| `match` | string | No | Match logic for filters \(any, all\). 
Default: all | +| `apiKey` | string | Yes | Cloudflare API Token | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `zones` | array | List of zones/domains | +| ↳ `id` | string | Zone ID | +| ↳ `name` | string | Domain name | +| ↳ `status` | string | Zone status \(initializing, pending, active, moved\) | +| ↳ `paused` | boolean | Whether the zone is paused | +| ↳ `type` | string | Zone type \(full, partial, or secondary\) | +| ↳ `name_servers` | array | Assigned Cloudflare name servers | +| ↳ `original_name_servers` | array | Original name servers before moving to Cloudflare | +| ↳ `created_on` | string | ISO 8601 date when the zone was created | +| ↳ `modified_on` | string | ISO 8601 date when the zone was last modified | +| ↳ `activated_on` | string | ISO 8601 date when the zone was activated | +| ↳ `development_mode` | number | Seconds remaining in development mode \(0 = off\) | +| ↳ `plan` | object | Zone plan information | +| ↳ `id` | string | Plan identifier | +| ↳ `name` | string | Plan name | +| ↳ `price` | number | Plan price | +| ↳ `is_subscribed` | boolean | Whether the zone is subscribed to the plan | +| ↳ `frequency` | string | Plan billing frequency | +| ↳ `currency` | string | Plan currency | +| ↳ `legacy_id` | string | Legacy plan identifier | +| ↳ `account` | object | Account the zone belongs to | +| ↳ `id` | string | Account identifier | +| ↳ `name` | string | Account name | +| ↳ `owner` | object | Zone owner information | +| ↳ `id` | string | Owner identifier | +| ↳ `name` | string | Owner name | +| ↳ `type` | string | Owner type | +| ↳ `meta` | object | Zone metadata | +| ↳ `cdn_only` | boolean | Whether the zone is CDN only | +| ↳ `custom_certificate_quota` | number | Custom certificate quota | +| ↳ `dns_only` | boolean | Whether the zone is DNS only | +| ↳ `foundation_dns` | boolean | Whether foundation DNS is enabled | +| ↳ `page_rule_quota` | number | Page rule quota | +| ↳ `phishing_detected` | boolean | Whether phishing was detected | +| ↳ `step` | number | Current setup step | +| ↳ `vanity_name_servers` | array | Custom vanity name servers | +| ↳ `permissions` | array | User permissions for the zone | +| `total_count` | number | Total number of zones matching the query | + +### `cloudflare_get_zone` + +Gets details for a specific zone (domain) by its ID. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `zoneId` | string | Yes | The zone ID to retrieve details for | +| `apiKey` | string | Yes | Cloudflare API Token | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `id` | string | Zone ID | +| `name` | string | Domain name | +| `status` | string | Zone status \(initializing, pending, active, moved\) | +| `paused` | boolean | Whether the zone is paused | +| `type` | string | Zone type \(full, partial, or secondary\) | +| `name_servers` | array | Assigned Cloudflare name servers | +| `original_name_servers` | array | Original name servers before moving to Cloudflare | +| `created_on` | string | ISO 8601 date when the zone was created | +| `modified_on` | string | ISO 8601 date when the zone was last modified | +| `activated_on` | string | ISO 8601 date when the zone was activated | +| `development_mode` | number | Seconds remaining in development mode \(0 = off\) | +| `plan` | object | Zone plan information | +| ↳ `id` | string | Plan identifier | +| ↳ `name` | string | Plan name | +| ↳ `price` | number | Plan price | +| ↳ `is_subscribed` | boolean | Whether the zone is subscribed to the plan | +| ↳ `frequency` | string | Plan billing frequency | +| ↳ `currency` | string | Plan currency | +| ↳ `legacy_id` | string | Legacy plan identifier | +| `account` | object | Account the zone belongs to | +| ↳ `id` | string | Account identifier | +| ↳ `name` | string | Account name | +| `owner` | object | Zone owner information | +| ↳ `id` | string | Owner identifier | +| ↳ `name` | string | Owner name | +| ↳ `type` | string | Owner type | +| `meta` | object | Zone metadata | +| ↳ `cdn_only` | boolean | Whether the zone is CDN only | +| ↳ `custom_certificate_quota` | number | Custom certificate quota | +| ↳ `dns_only` | boolean | Whether the zone is DNS only | +| ↳ `foundation_dns` | boolean | Whether foundation DNS is enabled | +| ↳ `page_rule_quota` | number | Page rule quota | +| ↳ `phishing_detected` | boolean | Whether phishing was detected | +| ↳ `step` | number | Current setup step | +| `vanity_name_servers` | array | Custom vanity name servers | +| `permissions` | array | User permissions for the zone | + +### `cloudflare_create_zone` + +Adds a new zone (domain) to the Cloudflare account. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `name` | string | Yes | The domain name to add \(e.g., "example.com"\) | +| `accountId` | string | Yes | The Cloudflare account ID | +| `type` | string | No | Zone type: "full" \(Cloudflare manages DNS\), "partial" \(CNAME setup\), or "secondary" \(secondary DNS\) | +| `jump_start` | boolean | No | Automatically attempt to fetch existing DNS records when creating the zone | +| `apiKey` | string | Yes | Cloudflare API Token | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `id` | string | Created zone ID | +| `name` | string | Domain name | +| `status` | string | Zone status \(initializing, pending, active, moved\) | +| `paused` | boolean | Whether the zone is paused | +| `type` | string | Zone type \(full, partial, or secondary\) | +| `name_servers` | array | Assigned Cloudflare name servers | +| `original_name_servers` | array | Original name servers before moving to Cloudflare | +| `created_on` | string | ISO 8601 date when the zone was created | +| `modified_on` | string | ISO 8601 date when the zone was last modified | +| `activated_on` | string | ISO 8601 date when the zone was activated | +| `development_mode` | number | Seconds remaining in development mode \(0 = off\) | +| `plan` | object | Zone plan information | +| ↳ `id` | string | Plan identifier | +| ↳ `name` | string | Plan name | +| ↳ `price` | number | Plan price | +| ↳ `is_subscribed` | boolean | Whether the zone is subscribed to the plan | +| ↳ `frequency` | string | Plan billing frequency | +| ↳ `currency` | string | Plan currency | +| ↳ `legacy_id` | string | Legacy plan identifier | +| `account` | object | Account the zone belongs to | +| ↳ `id` | string | Account identifier | +| ↳ `name` | string | Account name | +| `owner` | object | Zone owner information | +| ↳ `id` | string | Owner identifier | +| ↳ `name` | string | Owner name | +| ↳ `type` | string | Owner type | +| `meta` | object | Zone metadata | +| ↳ `cdn_only` | boolean | Whether the zone is CDN only | +| ↳ `custom_certificate_quota` | number | Custom certificate quota | +| ↳ `dns_only` | boolean | Whether the zone is DNS only | +| ↳ `foundation_dns` | boolean | Whether foundation DNS is enabled | +| ↳ `page_rule_quota` | number | Page rule quota | +| ↳ `phishing_detected` | boolean | Whether phishing was detected | +| ↳ `step` | number | Current setup step | +| `vanity_name_servers` | array | Custom vanity name servers | +| `permissions` | array | User permissions for the zone | + +### `cloudflare_delete_zone` + +Deletes a zone (domain) from the Cloudflare account. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `zoneId` | string | Yes | The zone ID to delete | +| `apiKey` | string | Yes | Cloudflare API Token | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `id` | string | Deleted zone ID | + +### `cloudflare_list_dns_records` + +Lists DNS records for a specific zone. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `zoneId` | string | Yes | The zone ID to list DNS records for | +| `type` | string | No | Filter by record type \(e.g., "A", "AAAA", "CNAME", "MX", "TXT"\) | +| `name` | string | No | Filter by record name \(exact match\) | +| `content` | string | No | Filter by record content \(exact match\) | +| `page` | number | No | Page number for pagination \(default: 1\) | +| `per_page` | number | No | Number of records per page \(default: 100, max: 5000000\) | +| `direction` | string | No | Sort direction \(asc or desc\) | +| `match` | string | No | Match logic for filters: any or all \(default: all\) | +| `order` | string | No | Sort field \(type, name, content, ttl, proxied\) | +| `proxied` | boolean | No | Filter by proxy status | +| `search` | string | No | Free-text search across record name, content, and value | +| `tag` | string | No | Filter by tags \(comma-separated\) | +| `tag_match` | string | No | Tag filter match logic: any or all | +| `commentFilter` | string | No | Filter records by comment content \(substring match\) | +| `apiKey` | string | Yes | Cloudflare API Token | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `records` | array | List of DNS records | +| ↳ `id` | string | Unique identifier for the DNS record | +| ↳ `zone_id` | string | The ID of the zone the record belongs to | +| ↳ `zone_name` | string | The name of the zone | +| ↳ `type` | string | Record type \(A, AAAA, CNAME, MX, TXT, etc.\) | +| ↳ `name` | string | Record name \(e.g., example.com\) | +| ↳ `content` | string | Record content \(e.g., IP address\) | +| ↳ `proxiable` | boolean | Whether the record can be proxied | +| ↳ `proxied` | boolean | Whether Cloudflare proxy is enabled | +| ↳ `ttl` | number | TTL in seconds \(1 = automatic\) | +| ↳ `locked` | boolean | Whether the record is locked | +| ↳ `priority` | number | MX/SRV record priority | +| ↳ `comment` | string | Comment associated with the record | +| ↳ `tags` | array | Tags associated with the record | +| ↳ `comment_modified_on` | string | ISO 8601 timestamp when the comment was last modified | +| ↳ `tags_modified_on` | string | ISO 8601 timestamp when tags were last modified | +| ↳ `meta` | object | Record metadata | +| ↳ `source` | string | Source of the DNS record | +| ↳ `created_on` | string | ISO 8601 timestamp when the record was created | +| ↳ `modified_on` | string | ISO 8601 timestamp when the record was last modified | +| `total_count` | number | Total number of DNS records matching the query | + +### `cloudflare_create_dns_record` + +Creates a new DNS record for a zone. 
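+This appears to map to Cloudflare's `POST /zones/{zone_id}/dns_records` endpoint. A sketch of the equivalent direct call (the token, zone ID, and record values are placeholders):
+
+```typescript
+// Placeholders: apiKey (API token) and zoneId are assumed to be in scope.
+const res = await fetch(
+  `https://api.cloudflare.com/client/v4/zones/${zoneId}/dns_records`,
+  {
+    method: 'POST',
+    headers: {
+      Authorization: `Bearer ${apiKey}`,
+      'Content-Type': 'application/json',
+    },
+    body: JSON.stringify({
+      type: 'A',
+      name: 'app.example.com',
+      content: '203.0.113.10',
+      ttl: 1, // 1 = automatic
+      proxied: true,
+    }),
+  }
+)
+const { result } = await res.json() // result.id is the new record's ID
+```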
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `zoneId` | string | Yes | The zone ID to create the DNS record in | +| `type` | string | Yes | DNS record type \(e.g., "A", "AAAA", "CNAME", "MX", "TXT", "NS", "SRV"\) | +| `name` | string | Yes | DNS record name \(e.g., "example.com" or "subdomain.example.com"\) | +| `content` | string | Yes | DNS record content \(e.g., IP address for A records, target for CNAME\) | +| `ttl` | number | No | Time to live in seconds \(1 = automatic, default: 1\) | +| `proxied` | boolean | No | Whether to enable Cloudflare proxy \(default: false\) | +| `priority` | number | No | Priority for MX and SRV records | +| `comment` | string | No | Comment for the DNS record | +| `tags` | string | No | Comma-separated tags for the DNS record | +| `apiKey` | string | Yes | Cloudflare API Token | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `id` | string | Unique identifier for the created DNS record | +| `zone_id` | string | The ID of the zone the record belongs to | +| `zone_name` | string | The name of the zone | +| `type` | string | DNS record type \(A, AAAA, CNAME, MX, TXT, etc.\) | +| `name` | string | DNS record hostname | +| `content` | string | DNS record value \(e.g., IP address, target hostname\) | +| `proxiable` | boolean | Whether the record can be proxied through Cloudflare | +| `proxied` | boolean | Whether Cloudflare proxy is enabled | +| `ttl` | number | Time to live in seconds \(1 = automatic\) | +| `locked` | boolean | Whether the record is locked | +| `priority` | number | Priority for MX and SRV records | +| `comment` | string | Comment associated with the record | +| `tags` | array | Tags associated with the record | +| `comment_modified_on` | string | ISO 8601 timestamp when the comment was last modified | +| `tags_modified_on` | string | ISO 8601 timestamp when tags were last modified | +| `meta` | object | Record metadata | +| ↳ `source` | string | Source of the DNS record | +| `created_on` | string | ISO 8601 timestamp when the record was created | +| `modified_on` | string | ISO 8601 timestamp when the record was last modified | + +### `cloudflare_update_dns_record` + +Updates an existing DNS record for a zone. 
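+Since every field except the IDs is optional, this reads as a partial update, matching Cloudflare's `PATCH /zones/{zone_id}/dns_records/{dns_record_id}` semantics. A sketch under that assumption (IDs and token are placeholders):
+
+```typescript
+// Only `content` and `proxied` are sent; type, name, and TTL keep their values.
+await fetch(
+  `https://api.cloudflare.com/client/v4/zones/${zoneId}/dns_records/${recordId}`,
+  {
+    method: 'PATCH',
+    headers: {
+      Authorization: `Bearer ${apiKey}`,
+      'Content-Type': 'application/json',
+    },
+    body: JSON.stringify({ content: '203.0.113.20', proxied: false }),
+  }
+)
+```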
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `zoneId` | string | Yes | The zone ID containing the DNS record | +| `recordId` | string | Yes | The DNS record ID to update | +| `type` | string | No | DNS record type \(e.g., "A", "AAAA", "CNAME", "MX", "TXT"\) | +| `name` | string | No | DNS record name | +| `content` | string | No | DNS record content \(e.g., IP address\) | +| `ttl` | number | No | Time to live in seconds \(1 = automatic\) | +| `proxied` | boolean | No | Whether to enable Cloudflare proxy | +| `priority` | number | No | Priority for MX and SRV records | +| `comment` | string | No | Comment for the DNS record | +| `tags` | string | No | Comma-separated tags for the DNS record | +| `apiKey` | string | Yes | Cloudflare API Token | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `id` | string | Unique identifier for the updated DNS record | +| `zone_id` | string | The ID of the zone the record belongs to | +| `zone_name` | string | The name of the zone | +| `type` | string | DNS record type \(A, AAAA, CNAME, MX, TXT, etc.\) | +| `name` | string | DNS record hostname | +| `content` | string | DNS record value \(e.g., IP address, target hostname\) | +| `proxiable` | boolean | Whether the record can be proxied through Cloudflare | +| `proxied` | boolean | Whether Cloudflare proxy is enabled | +| `ttl` | number | Time to live in seconds \(1 = automatic\) | +| `locked` | boolean | Whether the record is locked | +| `priority` | number | Priority for MX and SRV records | +| `comment` | string | Comment associated with the record | +| `tags` | array | Tags associated with the record | +| `comment_modified_on` | string | ISO 8601 timestamp when the comment was last modified | +| `tags_modified_on` | string | ISO 8601 timestamp when tags were last modified | +| `meta` | object | Record metadata | +| ↳ `source` | string | Source of the DNS record | +| `created_on` | string | ISO 8601 timestamp when the record was created | +| `modified_on` | string | ISO 8601 timestamp when the record was last modified | + +### `cloudflare_delete_dns_record` + +Deletes a DNS record from a zone. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `zoneId` | string | Yes | The zone ID containing the DNS record | +| `recordId` | string | Yes | The DNS record ID to delete | +| `apiKey` | string | Yes | Cloudflare API Token | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `id` | string | Deleted record ID | + +### `cloudflare_list_certificates` + +Lists SSL/TLS certificate packs for a zone. 
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `zoneId` | string | Yes | The zone ID to list certificates for | +| `status` | string | No | Filter certificate packs by status \(e.g., "all", "active", "pending"\) | +| `page` | number | No | Page number of paginated results \(default: 1\) | +| `per_page` | number | No | Number of certificate packs per page \(default: 20, min: 5, max: 50\) | +| `deploy` | string | No | Filter by deployment environment: "staging" or "production" | +| `apiKey` | string | Yes | Cloudflare API Token | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `certificates` | array | List of SSL/TLS certificate packs | +| ↳ `id` | string | Certificate pack ID | +| ↳ `type` | string | Certificate type \(e.g., "universal", "advanced"\) | +| ↳ `hosts` | array | Hostnames covered by this certificate pack | +| ↳ `primary_certificate` | string | ID of the primary certificate in the pack | +| ↳ `status` | string | Certificate pack status \(e.g., "active", "pending"\) | +| ↳ `certificates` | array | Individual certificates within the pack | +| ↳ `id` | string | Certificate ID | +| ↳ `hosts` | array | Hostnames covered by this certificate | +| ↳ `issuer` | string | Certificate issuer | +| ↳ `signature` | string | Signature algorithm \(e.g., "ECDSAWithSHA256"\) | +| ↳ `status` | string | Certificate status | +| ↳ `bundle_method` | string | Bundle method \(e.g., "ubiquitous"\) | +| ↳ `zone_id` | string | Zone ID the certificate belongs to | +| ↳ `uploaded_on` | string | Upload date \(ISO 8601\) | +| ↳ `modified_on` | string | Last modified date \(ISO 8601\) | +| ↳ `expires_on` | string | Expiration date \(ISO 8601\) | +| ↳ `priority` | number | Certificate priority order | +| ↳ `geo_restrictions` | object | Geographic restrictions for the certificate | +| ↳ `label` | string | Geographic restriction label | +| ↳ `cloudflare_branding` | boolean | Whether Cloudflare branding is enabled on the certificate | +| ↳ `validation_method` | string | Validation method \(e.g., "txt", "http", "cname"\) | +| ↳ `validity_days` | number | Validity period in days | +| ↳ `certificate_authority` | string | Certificate authority \(e.g., "lets_encrypt", "google"\) | +| ↳ `validation_errors` | array | Validation issues for the certificate pack | +| ↳ `message` | string | Validation error message | +| ↳ `validation_records` | array | Validation records for the certificate pack | +| ↳ `cname` | string | CNAME record name | +| ↳ `cname_target` | string | CNAME record target | +| ↳ `emails` | array | Email addresses for validation | +| ↳ `http_body` | string | HTTP validation body content | +| ↳ `http_url` | string | HTTP validation URL | +| ↳ `status` | string | Validation record status | +| ↳ `txt_name` | string | TXT record name | +| ↳ `txt_value` | string | TXT record value | +| ↳ `dcv_delegation_records` | array | Domain control validation delegation records | +| ↳ `cname` | string | CNAME record name | +| ↳ `cname_target` | string | CNAME record target | +| ↳ `emails` | array | Email addresses for validation | +| ↳ `http_body` | string | HTTP validation body content | +| ↳ `http_url` | string | HTTP validation URL | +| ↳ `status` | string | Delegation record status | +| ↳ `txt_name` | string | TXT record name | +| ↳ `txt_value` | string | TXT record value | +| `total_count` | number | Total number of certificate packs | + +### `cloudflare_get_zone_settings` + +Gets all settings for a zone 
including SSL mode, minification, caching level, and security settings.
+
+#### Input
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `zoneId` | string | Yes | The zone ID to get settings for |
+| `apiKey` | string | Yes | Cloudflare API Token |
+
+#### Output
+
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `settings` | array | List of zone settings |
+| ↳ `id` | string | Setting identifier \(e.g., ssl, minify, cache_level, security_level, always_use_https\) |
+| ↳ `value` | string | Setting value as a string. Simple values returned as-is \(e.g., "full", "on"\). Complex values are JSON-stringified \(e.g., \{"css":"on","html":"on","js":"on"\} for minify\) |
+| ↳ `editable` | boolean | Whether the setting can be modified for the current zone plan |
+| ↳ `modified_on` | string | ISO 8601 timestamp when the setting was last modified |
+| ↳ `time_remaining` | number | Seconds remaining until the setting can be modified again \(only present for rate-limited settings\) |
+
+### `cloudflare_update_zone_setting`
+
+Updates a specific zone setting such as SSL mode, security level, cache level, minification, or other configuration.
+
+#### Input
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `zoneId` | string | Yes | The zone ID to update settings for |
+| `settingId` | string | Yes | Setting to update \(e.g., "ssl", "security_level", "cache_level", "minify", "always_use_https", "browser_cache_ttl", "http3", "min_tls_version", "ciphers"\) |
+| `value` | string | Yes | New value for the setting as a string or JSON string for complex values \(e.g., "full" for SSL, "medium" for security_level, "aggressive" for cache_level, \'\{"css":"on","html":"on","js":"on"\}\' for minify, \'\["ECDHE-RSA-AES128-GCM-SHA256"\]\' for ciphers\) |
+| `apiKey` | string | Yes | Cloudflare API Token |
+
+#### Output
+
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `id` | string | Setting identifier \(e.g., ssl, minify, cache_level\) |
+| `value` | string | Updated setting value as a string. Simple values returned as-is \(e.g., "full", "on"\). Complex values are JSON-stringified. |
+| `editable` | boolean | Whether the setting can be modified for the current zone plan |
+| `modified_on` | string | ISO 8601 timestamp when the setting was last modified |
+| `time_remaining` | number | Seconds remaining until the setting can be modified again \(only present for rate-limited settings\) |
+
+### `cloudflare_dns_analytics`
+
+Gets a DNS analytics report for a zone, including query counts and trends.
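+The comma-separated `metrics` and `dimensions` strings pass through as query-string parameters to Cloudflare's report endpoint \(`GET /zones/{zone_id}/dns_analytics/report`\). A sketch of the equivalent request \(token and zone ID are placeholders\):
+
+```typescript
+const params = new URLSearchParams({
+  metrics: 'queryCount,responseTimeAvg',
+  dimensions: 'queryType',
+  since: '-6h',
+  until: 'now',
+  sort: '-queryCount',
+})
+const res = await fetch(
+  `https://api.cloudflare.com/client/v4/zones/${zoneId}/dns_analytics/report?${params}`,
+  { headers: { Authorization: `Bearer ${apiKey}` } }
+)
+const { result } = await res.json()
+// result.totals aggregates the whole period; each result.data row carries
+// dimension and metric values parallel to the requested lists.
+```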
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `zoneId` | string | Yes | The zone ID to get DNS analytics for | +| `since` | string | No | Start date for analytics \(ISO 8601, e.g., "2024-01-01T00:00:00Z"\) or relative \(e.g., "-6h"\) | +| `until` | string | No | End date for analytics \(ISO 8601, e.g., "2024-01-31T23:59:59Z"\) or relative \(e.g., "now"\) | +| `metrics` | string | Yes | Comma-separated metrics to retrieve \(e.g., "queryCount,uncachedCount,staleCount,responseTimeAvg,responseTimeMedian,responseTime90th,responseTime99th"\) | +| `dimensions` | string | No | Comma-separated dimensions to group by \(e.g., "queryName,queryType,responseCode,responseCached,coloName,origin,dayOfWeek,tcp,ipVersion,querySizeBucket,responseSizeBucket"\) | +| `filters` | string | No | Filters to apply to the data \(e.g., "queryType==A"\) | +| `sort` | string | No | Sort order for the result set. Fields must be included in metrics or dimensions \(e.g., "+queryCount" or "-responseTimeAvg"\) | +| `limit` | number | No | Maximum number of results to return | +| `apiKey` | string | Yes | Cloudflare API Token | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `totals` | object | Aggregate DNS analytics totals for the entire queried period | +| ↳ `queryCount` | number | Total number of DNS queries | +| ↳ `uncachedCount` | number | Number of uncached DNS queries | +| ↳ `staleCount` | number | Number of stale DNS queries | +| ↳ `responseTimeAvg` | number | Average response time in milliseconds | +| ↳ `responseTimeMedian` | number | Median response time in milliseconds | +| ↳ `responseTime90th` | number | 90th percentile response time in milliseconds | +| ↳ `responseTime99th` | number | 99th percentile response time in milliseconds | +| `min` | object | Minimum values across the analytics period | +| ↳ `queryCount` | number | Minimum number of DNS queries | +| ↳ `uncachedCount` | number | Minimum number of uncached DNS queries | +| ↳ `staleCount` | number | Minimum number of stale DNS queries | +| ↳ `responseTimeAvg` | number | Minimum average response time in milliseconds | +| ↳ `responseTimeMedian` | number | Minimum median response time in milliseconds | +| ↳ `responseTime90th` | number | Minimum 90th percentile response time in milliseconds | +| ↳ `responseTime99th` | number | Minimum 99th percentile response time in milliseconds | +| `max` | object | Maximum values across the analytics period | +| ↳ `queryCount` | number | Maximum number of DNS queries | +| ↳ `uncachedCount` | number | Maximum number of uncached DNS queries | +| ↳ `staleCount` | number | Maximum number of stale DNS queries | +| ↳ `responseTimeAvg` | number | Maximum average response time in milliseconds | +| ↳ `responseTimeMedian` | number | Maximum median response time in milliseconds | +| ↳ `responseTime90th` | number | Maximum 90th percentile response time in milliseconds | +| ↳ `responseTime99th` | number | Maximum 99th percentile response time in milliseconds | +| `data` | array | Raw analytics data rows returned by the Cloudflare DNS analytics report | +| ↳ `dimensions` | array | Dimension values for this data row, parallel to the requested dimensions list | +| ↳ `metrics` | array | Metric values for this data row, parallel to the requested metrics list | +| `data_lag` | number | Processing lag in seconds before analytics data becomes available | +| `rows` | number | Total number of rows in the result set | +| `query` | object | 
Echo of the query parameters sent to the API | +| ↳ `since` | string | Start date of the analytics query | +| ↳ `until` | string | End date of the analytics query | +| ↳ `metrics` | array | Metrics requested in the query | +| ↳ `dimensions` | array | Dimensions requested in the query | +| ↳ `filters` | string | Filters applied to the query | +| ↳ `sort` | array | Sort order applied to the query | +| ↳ `limit` | number | Maximum number of results requested | + +### `cloudflare_purge_cache` + +Purges cached content for a zone. Can purge everything or specific files/tags/hosts/prefixes. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `zoneId` | string | Yes | The zone ID to purge cache for | +| `purge_everything` | boolean | No | Set to true to purge all cached content. Mutually exclusive with files, tags, hosts, and prefixes | +| `files` | string | No | Comma-separated list of URLs to purge from cache | +| `tags` | string | No | Comma-separated list of cache tags to purge \(Enterprise only\) | +| `hosts` | string | No | Comma-separated list of hostnames to purge \(Enterprise only\) | +| `prefixes` | string | No | Comma-separated list of URL prefixes to purge \(Enterprise only\) | +| `apiKey` | string | Yes | Cloudflare API Token | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `id` | string | Purge request identifier returned by Cloudflare | + + diff --git a/apps/docs/content/docs/en/tools/meta.json b/apps/docs/content/docs/en/tools/meta.json index e19b01b3d5..58317bea9f 100644 --- a/apps/docs/content/docs/en/tools/meta.json +++ b/apps/docs/content/docs/en/tools/meta.json @@ -5,6 +5,7 @@ "ahrefs", "airtable", "airweave", + "algolia", "apify", "apollo", "arxiv", @@ -15,6 +16,7 @@ "circleback", "clay", "clerk", + "cloudflare", "confluence", "cursor", "datadog", @@ -68,6 +70,7 @@ "mailgun", "mem0", "memory", + "microsoft_dataverse", "microsoft_excel", "microsoft_planner", "microsoft_teams", @@ -91,8 +94,10 @@ "qdrant", "rds", "reddit", + "redis", "reducto", "resend", + "revenuecat", "s3", "salesforce", "search", @@ -112,6 +117,7 @@ "stripe", "stt", "supabase", + "table", "tavily", "telegram", "textract", @@ -122,6 +128,7 @@ "twilio_sms", "twilio_voice", "typeform", + "upstash", "vercel", "video_generator", "vision", diff --git a/apps/docs/content/docs/en/tools/microsoft_dataverse.mdx b/apps/docs/content/docs/en/tools/microsoft_dataverse.mdx new file mode 100644 index 0000000000..e548c0ea0e --- /dev/null +++ b/apps/docs/content/docs/en/tools/microsoft_dataverse.mdx @@ -0,0 +1,426 @@ +--- +title: Microsoft Dataverse +description: Manage records in Microsoft Dataverse tables +--- + +import { BlockInfoCard } from "@/components/ui/block-info-card" + + + +{/* MANUAL-CONTENT-START:intro */} +[Microsoft Dataverse](https://learn.microsoft.com/en-us/power-apps/maker/data-platform/data-platform-intro) is a powerful cloud data platform for securely storing, managing, and interacting with structured business data. The Microsoft Dataverse integration enables you to programmatically create, read, update, delete, and link records in Dataverse tables as part of your workflow and automation needs. + +With Microsoft Dataverse integration, you can: + +- **List and query records:** Access lists of records or query with advanced filters to find the data you need from any Dataverse table. 
+- **Create and update records:** Add new records or update existing ones in any table for use across Power Platform, Dynamics 365, and custom apps. +- **Delete and manage records:** Remove records as part of data lifecycle management directly from your automation flows. +- **Associate and disassociate records:** Link related items together or remove associations using entity relationships and navigation properties—essential for reflecting complex business processes. +- **Work with any Dataverse environment:** Connect to your organization’s environments, including production, sandbox, or Dynamics 365 tenants, for maximum flexibility. +- **Integrate with Power Platform and Dynamics 365:** Automate tasks ranging from sales and marketing data updates to custom app workflows—all powered by Dataverse's security and governance. + +The Dataverse integration empowers solution builders and business users to automate business processes, maintain accurate and up-to-date information, create system integrations, trigger actions, and drive insights—all with robust security and governance. + +Connect Microsoft Dataverse to your automations to unlock sophisticated data management, orchestration, and business logic across your apps, teams, and cloud services. +{/* MANUAL-CONTENT-END */} + + +## Usage Instructions + +Integrate Microsoft Dataverse into your workflow. Create, read, update, delete, upsert, associate, query, search, and execute actions and functions against Dataverse tables using the Web API. Supports bulk operations, FetchXML, file uploads, and relevance search. Works with Dynamics 365, Power Platform, and custom Dataverse environments. + + + +## Tools + +### `microsoft_dataverse_associate` + +Associate two records in Microsoft Dataverse via a navigation property. Creates a relationship between a source record and a target record. Supports both collection-valued (POST) and single-valued (PUT) navigation properties. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) | +| `entitySetName` | string | Yes | Source entity set name \(e.g., accounts\) | +| `recordId` | string | Yes | Source record GUID | +| `navigationProperty` | string | Yes | Navigation property name \(e.g., contact_customer_accounts for collection-valued, or parentcustomerid_account for single-valued\) | +| `targetEntitySetName` | string | Yes | Target entity set name \(e.g., contacts\) | +| `targetRecordId` | string | Yes | Target record GUID to associate | +| `navigationType` | string | No | Type of navigation property: "collection" \(default, uses POST\) or "single" \(uses PUT for lookup fields\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `success` | boolean | Whether the association was created successfully | +| `entitySetName` | string | Source entity set name used in the association | +| `recordId` | string | Source record GUID that was associated | +| `navigationProperty` | string | Navigation property used for the association | +| `targetEntitySetName` | string | Target entity set name used in the association | +| `targetRecordId` | string | Target record GUID that was associated | + +### `microsoft_dataverse_create_multiple` + +Create multiple records of the same table type in a single request. Each record in the Targets array must include an @odata.type annotation. 
Recommended batch size: 100-1000 records for standard tables. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) | +| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) | +| `entityLogicalName` | string | Yes | Table logical name for @odata.type annotation \(e.g., account, contact\). Used to set Microsoft.Dynamics.CRM.\{entityLogicalName\} on each record. | +| `records` | object | Yes | Array of record objects to create. Each record should contain column logical names as keys. The @odata.type annotation is added automatically. | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `ids` | array | Array of GUIDs for the created records | +| `count` | number | Number of records created | +| `success` | boolean | Whether all records were created successfully | + +### `microsoft_dataverse_create_record` + +Create a new record in a Microsoft Dataverse table. Requires the entity set name (plural table name) and record data as a JSON object. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) | +| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) | +| `data` | object | Yes | Record data as a JSON object with column names as keys | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `recordId` | string | The ID of the created record | +| `record` | object | Dataverse record object. Contains dynamic columns based on the queried table, plus OData metadata fields. | +| `success` | boolean | Whether the record was created successfully | + +### `microsoft_dataverse_delete_record` + +Delete a record from a Microsoft Dataverse table by its ID. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) | +| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) | +| `recordId` | string | Yes | The unique identifier \(GUID\) of the record to delete | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `recordId` | string | The ID of the deleted record | +| `success` | boolean | Operation success status | + +### `microsoft_dataverse_disassociate` + +Remove an association between two records in Microsoft Dataverse. For collection-valued navigation properties, provide the target record ID. For single-valued navigation properties, only the navigation property name is needed. 
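+
+To make the two navigation-property shapes concrete, here is a minimal sketch of the underlying Web API requests (per Microsoft's Dataverse Web API `$ref` conventions; the tool constructs these for you, and the org URL, GUIDs, and token variable below are placeholders):
+
+```typescript
+// Hedged sketch of the Dataverse disassociate requests; all values are placeholders.
+const base = 'https://myorg.crm.dynamics.com/api/data/v9.2'
+const headers = { Authorization: `Bearer ${process.env.DATAVERSE_TOKEN}` }
+const accountId = '00000000-0000-0000-0000-000000000001' // source record (placeholder)
+const contactId = '00000000-0000-0000-0000-000000000002' // target record (placeholder)
+
+// Collection-valued navigation property: address the target record's $ref and DELETE it.
+await fetch(`${base}/accounts(${accountId})/contact_customer_accounts(${contactId})/$ref`, {
+  method: 'DELETE',
+  headers,
+})
+
+// Single-valued navigation property (lookup): DELETE the $ref itself; no target ID needed.
+await fetch(`${base}/accounts(${accountId})/parentcustomerid_account/$ref`, {
+  method: 'DELETE',
+  headers,
+})
+```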
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) | +| `entitySetName` | string | Yes | Source entity set name \(e.g., accounts\) | +| `recordId` | string | Yes | Source record GUID | +| `navigationProperty` | string | Yes | Navigation property name \(e.g., contact_customer_accounts for collection-valued, or parentcustomerid_account for single-valued\) | +| `targetRecordId` | string | No | Target record GUID \(required for collection-valued navigation properties, omit for single-valued\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `success` | boolean | Whether the disassociation was completed successfully | +| `entitySetName` | string | Source entity set name used in the disassociation | +| `recordId` | string | Source record GUID that was disassociated | +| `navigationProperty` | string | Navigation property used for the disassociation | +| `targetRecordId` | string | Target record GUID that was disassociated | + +### `microsoft_dataverse_download_file` + +Download a file from a file or image column on a Dataverse record. Returns the file content as a base64-encoded string along with file metadata from response headers. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) | +| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) | +| `recordId` | string | Yes | Record GUID to download the file from | +| `fileColumn` | string | Yes | File or image column logical name \(e.g., entityimage, cr_document\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `fileContent` | string | Base64-encoded file content | +| `fileName` | string | Name of the downloaded file | +| `fileSize` | number | File size in bytes | +| `mimeType` | string | MIME type of the file | +| `success` | boolean | Whether the file was downloaded successfully | + +### `microsoft_dataverse_execute_action` + +Execute a bound or unbound Dataverse action. Actions perform operations with side effects (e.g., Merge, GrantAccess, SendEmail, QualifyLead). For bound actions, provide the entity set name and record ID. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) | +| `actionName` | string | Yes | Action name \(e.g., Merge, GrantAccess, SendEmail\). Do not include the Microsoft.Dynamics.CRM. namespace prefix for unbound actions. | +| `entitySetName` | string | No | Entity set name for bound actions \(e.g., accounts\). Leave empty for unbound actions. | +| `recordId` | string | No | Record GUID for bound actions. Leave empty for unbound or collection-bound actions. | +| `parameters` | object | No | Action parameters as a JSON object. For entity references, include @odata.type annotation \(e.g., \{"Target": \{"@odata.type": "Microsoft.Dynamics.CRM.account", "accountid": "..."\}\}\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `result` | object | Action response data. Structure varies by action. Null for actions that return 204 No Content. 
|
+| `success` | boolean | Whether the action executed successfully |
+
+### `microsoft_dataverse_execute_function`
+
+Execute a bound or unbound Dataverse function. Functions are read-only operations (e.g., RetrievePrincipalAccess, RetrieveTotalRecordCount, InitializeFrom). For bound functions, provide the entity set name and record ID.
+
+#### Input
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
+| `functionName` | string | Yes | Function name \(e.g., RetrievePrincipalAccess, RetrieveTotalRecordCount\). Do not include the Microsoft.Dynamics.CRM. namespace prefix for unbound functions. |
+| `entitySetName` | string | No | Entity set name for bound functions \(e.g., systemusers\). Leave empty for unbound functions. |
+| `recordId` | string | No | Record GUID for bound functions. Leave empty for unbound functions. |
+| `parameters` | string | No | Function parameters as a comma-separated list of name=value pairs for the URL \(e.g., "LocalizedStandardName=\'Pacific Standard Time\'"\) |
+
+#### Output
+
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `result` | object | Function response data. Structure varies by function. |
+| `success` | boolean | Whether the function executed successfully |
+
+### `microsoft_dataverse_fetchxml_query`
+
+Execute a FetchXML query against a Microsoft Dataverse table. FetchXML supports aggregation, grouping, linked-entity joins, and complex filtering beyond OData capabilities.
+
+#### Input
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
+| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
+| `fetchXml` | string | Yes | FetchXML query string. Must include <fetch> root element and <entity> child element matching the table logical name. |
+
+#### Output
+
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `records` | array | Array of Dataverse records. Each record has dynamic columns based on the table schema. |
+| `count` | number | Number of records returned in the current page |
+| `fetchXmlPagingCookie` | string | Paging cookie for retrieving the next page of results |
+| `moreRecords` | boolean | Whether more records are available beyond the current page |
+| `success` | boolean | Operation success status |
+
+### `microsoft_dataverse_get_record`
+
+Retrieve a single record from a Microsoft Dataverse table by its ID. Supports $select and $expand OData query options.
+
+#### Input
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
+| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
+| `recordId` | string | Yes | The unique identifier \(GUID\) of the record to retrieve |
+| `select` | string | No | Comma-separated list of columns to return \(OData $select\) |
+| `expand` | string | No | Navigation properties to expand \(OData $expand\) |
+
+#### Output
+
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `record` | object | Dataverse record object. Contains dynamic columns based on the queried table, plus OData metadata fields.
| +| `recordId` | string | The record primary key ID \(auto-detected from response\) | +| `success` | boolean | Whether the record was retrieved successfully | + +### `microsoft_dataverse_list_records` + +Query and list records from a Microsoft Dataverse table. Supports OData query options for filtering, selecting columns, ordering, and pagination. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) | +| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) | +| `select` | string | No | Comma-separated list of columns to return \(OData $select\) | +| `filter` | string | No | OData $filter expression \(e.g., statecode eq 0\) | +| `orderBy` | string | No | OData $orderby expression \(e.g., name asc, createdon desc\) | +| `top` | number | No | Maximum number of records to return \(OData $top\) | +| `expand` | string | No | Navigation properties to expand \(OData $expand\) | +| `count` | string | No | Set to "true" to include total record count in response \(OData $count\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `records` | array | Array of Dataverse records. Each record has dynamic columns based on the table schema. | +| `count` | number | Number of records returned in the current page | +| `totalCount` | number | Total number of matching records server-side \(requires $count=true\) | +| `nextLink` | string | URL for the next page of results | +| `success` | boolean | Operation success status | + +### `microsoft_dataverse_search` + +Perform a full-text relevance search across Microsoft Dataverse tables. Requires Dataverse Search to be enabled on the environment. Supports simple and Lucene query syntax. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) | +| `searchTerm` | string | Yes | Search text \(1-100 chars\). Supports simple syntax: + \(AND\), \| \(OR\), - \(NOT\), * \(wildcard\), "exact phrase" | +| `entities` | string | No | JSON array of search entity configs. Each object: \{"Name":"account","SelectColumns":\["name"\],"SearchColumns":\["name"\],"Filter":"statecode eq 0"\} | +| `filter` | string | No | Global OData filter applied across all entities \(e.g., "createdon gt 2024-01-01"\) | +| `facets` | string | No | JSON array of facet specifications \(e.g., \["entityname,count:100","ownerid,count:100"\]\) | +| `top` | number | No | Maximum number of results \(default: 50, max: 100\) | +| `skip` | number | No | Number of results to skip for pagination | +| `orderBy` | string | No | JSON array of sort expressions \(e.g., \["createdon desc"\]\) | +| `searchMode` | string | No | Search mode: "any" \(default, match any term\) or "all" \(match all terms\) | +| `searchType` | string | No | Query type: "simple" \(default\) or "lucene" \(enables regex, fuzzy, proximity, boosting\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `results` | array | Array of search result objects | +| ↳ `Id` | string | Record GUID | +| ↳ `EntityName` | string | Table logical name \(e.g., account, contact\) | +| ↳ `ObjectTypeCode` | number | Entity type code | +| ↳ `Attributes` | object | Record attributes matching the search. 
Keys are column logical names. | +| ↳ `Highlights` | object | Highlighted search matches. Keys are column names, values are arrays of strings with \{crmhit\}/\{/crmhit\} markers. | +| ↳ `Score` | number | Relevance score for this result | +| `totalCount` | number | Total number of matching records across all tables | +| `count` | number | Number of results returned in this page | +| `facets` | object | Facet results when facets were requested. Keys are facet names, values are arrays of facet value objects with count and value properties. | +| `success` | boolean | Operation success status | + +### `microsoft_dataverse_update_multiple` + +Update multiple records of the same table type in a single request. Each record must include its primary key. Only include columns that need to be changed. Recommended batch size: 100-1000 records. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) | +| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) | +| `entityLogicalName` | string | Yes | Table logical name for @odata.type annotation \(e.g., account, contact\). Used to set Microsoft.Dynamics.CRM.\{entityLogicalName\} on each record. | +| `records` | object | Yes | Array of record objects to update. Each record must include its primary key \(e.g., accountid\) and only the columns being changed. The @odata.type annotation is added automatically. | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `success` | boolean | Whether all records were updated successfully | + +### `microsoft_dataverse_update_record` + +Update an existing record in a Microsoft Dataverse table. Only send the columns you want to change. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) | +| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) | +| `recordId` | string | Yes | The unique identifier \(GUID\) of the record to update | +| `data` | object | Yes | Record data to update as a JSON object with column names as keys | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `recordId` | string | The ID of the updated record | +| `success` | boolean | Operation success status | + +### `microsoft_dataverse_upload_file` + +Upload a file to a file or image column on a Dataverse record. Supports single-request upload for files up to 128 MB. The file content must be provided as a base64-encoded string. 
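+
+For reference, one way to produce that base64 string in Node.js (a hedged sketch; the file path is illustrative):
+
+```typescript
+import { readFile } from 'node:fs/promises'
+
+// Read a local file and encode it for the fileContent parameter.
+const bytes = await readFile('./document.pdf')
+const fileContent = bytes.toString('base64')
+```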
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) | +| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) | +| `recordId` | string | Yes | Record GUID to upload the file to | +| `fileColumn` | string | Yes | File or image column logical name \(e.g., entityimage, cr_document\) | +| `fileName` | string | Yes | Name of the file being uploaded \(e.g., document.pdf\) | +| `file` | file | No | File to upload \(UserFile object\) | +| `fileContent` | string | No | Base64-encoded file content \(legacy\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `recordId` | string | Record GUID the file was uploaded to | +| `fileColumn` | string | File column the file was uploaded to | +| `fileName` | string | Name of the uploaded file | +| `success` | boolean | Whether the file was uploaded successfully | + +### `microsoft_dataverse_upsert_record` + +Create or update a record in a Microsoft Dataverse table. If a record with the given ID exists, it is updated; otherwise, a new record is created. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) | +| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) | +| `recordId` | string | Yes | The unique identifier \(GUID\) of the record to upsert | +| `data` | object | Yes | Record data as a JSON object with column names as keys | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `recordId` | string | The ID of the upserted record | +| `created` | boolean | True if the record was created, false if updated | +| `record` | object | Dataverse record object. Contains dynamic columns based on the queried table, plus OData metadata fields. | +| `success` | boolean | Operation success status | + +### `microsoft_dataverse_whoami` + +Retrieve the current authenticated user information from Microsoft Dataverse. Useful for testing connectivity and getting the user ID, business unit ID, and organization ID. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `userId` | string | The authenticated user ID | +| `businessUnitId` | string | The business unit ID | +| `organizationId` | string | The organization ID | +| `success` | boolean | Operation success status | + + diff --git a/apps/docs/content/docs/en/tools/redis.mdx b/apps/docs/content/docs/en/tools/redis.mdx new file mode 100644 index 0000000000..cf862f762e --- /dev/null +++ b/apps/docs/content/docs/en/tools/redis.mdx @@ -0,0 +1,452 @@ +--- +title: Redis +description: Key-value operations with Redis +--- + +import { BlockInfoCard } from "@/components/ui/block-info-card" + + + +{/* MANUAL-CONTENT-START:intro */} +[Redis](https://redis.io/) is an open-source, in-memory data structure store, used as a distributed key-value database, cache, and message broker. 
Redis supports a variety of data structures including strings, hashes, lists, sets, and more, making it highly flexible for different application scenarios. + +With Redis, you can: + +- **Store and retrieve key-value data instantly**: Use Redis as a fast database, cache, or session store for high performance. +- **Work with multiple data structures**: Manage not just strings, but also lists, hashes, sets, sorted sets, streams, and bitmaps. +- **Perform atomic operations**: Safely manipulate data using atomic commands and transactions. +- **Support pub/sub messaging**: Use Redis’s publisher/subscriber features for real-time event handling and messaging. +- **Set automatic expiration policies**: Assign TTLs to keys for caching and time-sensitive data. +- **Scale horizontally**: Use Redis Cluster for sharding, high availability, and scalable workloads. + +In Sim, the Redis integration lets your agents connect to any Redis-compatible instance to perform key-value, hash, list, and utility operations. You can build workflows that involve storing, retrieving, or manipulating data in Redis, or manage your app’s cache, sessions, or real-time messaging, directly within your Sim workspace. +{/* MANUAL-CONTENT-END */} + + +## Usage Instructions + +Connect to any Redis instance to perform key-value, hash, list, and utility operations via a direct connection. + + + +## Tools + +### `redis_get` + +Get the value of a key from Redis. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) | +| `key` | string | Yes | The key to retrieve | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The key that was retrieved | +| `value` | string | The value of the key, or null if the key does not exist | + +### `redis_set` + +Set the value of a key in Redis with an optional expiration time in seconds. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) | +| `key` | string | Yes | The key to set | +| `value` | string | Yes | The value to store | +| `ex` | number | No | Expiration time in seconds \(optional\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The key that was set | +| `result` | string | The result of the SET operation \(typically "OK"\) | + +### `redis_delete` + +Delete a key from Redis. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) | +| `key` | string | Yes | The key to delete | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The key that was deleted | +| `deletedCount` | number | Number of keys deleted \(0 if key did not exist, 1 if deleted\) | + +### `redis_keys` + +List all keys matching a pattern in Redis. Avoid using on large databases in production; use the Redis Command tool with SCAN for large key spaces. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `url` | string | Yes | Redis connection URL \(e.g. 
redis://user:password@host:port\) |
+| `pattern` | string | No | Pattern to match keys \(default: * for all keys\) |
+
+#### Output
+
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `pattern` | string | The pattern used to match keys |
+| `keys` | array | List of keys matching the pattern |
+| `count` | number | Number of keys found |
+
+### `redis_command`
+
+Execute a raw Redis command as a JSON array (e.g. ["SET", "key", "value"]).
+
+#### Input
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
+| `command` | string | Yes | Redis command as a JSON array \(e.g. \["SET", "key", "value"\]\) |
+
+#### Output
+
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `command` | string | The command that was executed |
+| `result` | json | The result of the command |
+
+### `redis_hset`
+
+Set a field in a hash stored at a key in Redis.
+
+#### Input
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
+| `key` | string | Yes | The hash key |
+| `field` | string | Yes | The field name within the hash |
+| `value` | string | Yes | The value to set for the field |
+
+#### Output
+
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `key` | string | The hash key |
+| `field` | string | The field that was set |
+| `result` | number | Number of fields added \(1 if new, 0 if updated\) |
+
+### `redis_hget`
+
+Get the value of a field in a hash stored at a key in Redis.
+
+#### Input
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
+| `key` | string | Yes | The hash key |
+| `field` | string | Yes | The field name to retrieve |
+
+#### Output
+
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `key` | string | The hash key |
+| `field` | string | The field that was retrieved |
+| `value` | string | The field value, or null if the field or key does not exist |
+
+### `redis_hgetall`
+
+Get all fields and values of a hash stored at a key in Redis.
+
+#### Input
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
+| `key` | string | Yes | The hash key |
+
+#### Output
+
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `key` | string | The hash key |
+| `fields` | object | All field-value pairs in the hash as a key-value object. Empty object if the key does not exist. |
+| `fieldCount` | number | Number of fields in the hash |
+
+### `redis_hdel`
+
+Delete a field from a hash stored at a key in Redis.
+
+#### Input
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `url` | string | Yes | Redis connection URL \(e.g.
redis://user:password@host:port\) | +| `key` | string | Yes | The hash key | +| `field` | string | Yes | The field name to delete | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The hash key | +| `field` | string | The field that was deleted | +| `deleted` | number | Number of fields removed \(1 if deleted, 0 if field did not exist\) | + +### `redis_incr` + +Increment the integer value of a key by one in Redis. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) | +| `key` | string | Yes | The key to increment | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The key that was incremented | +| `value` | number | The new value after increment | + +### `redis_incrby` + +Increment the integer value of a key by a given amount in Redis. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) | +| `key` | string | Yes | The key to increment | +| `increment` | number | Yes | Amount to increment by \(negative to decrement\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The key that was incremented | +| `value` | number | The new value after increment | + +### `redis_expire` + +Set an expiration time (in seconds) on a key in Redis. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) | +| `key` | string | Yes | The key to set expiration on | +| `seconds` | number | Yes | Timeout in seconds | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The key that expiration was set on | +| `result` | number | 1 if the timeout was set, 0 if the key does not exist | + +### `redis_ttl` + +Get the remaining time to live (in seconds) of a key in Redis. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) | +| `key` | string | Yes | The key to check TTL for | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The key that was checked | +| `ttl` | number | Remaining TTL in seconds. Positive integer if TTL set, -1 if no expiration, -2 if key does not exist. | + +### `redis_persist` + +Remove the expiration from a key in Redis, making it persist indefinitely. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) | +| `key` | string | Yes | The key to persist | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The key that was persisted | +| `result` | number | 1 if the expiration was removed, 0 if the key does not exist or has no expiration | + +### `redis_lpush` + +Prepend a value to a list stored at a key in Redis. 
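+
+To illustrate the prepend semantics, a minimal sketch using the node-redis client (assumed here only for illustration; the tool itself just needs the connection URL, key, and value):
+
+```typescript
+import { createClient } from 'redis'
+
+// Placeholder connection URL and key.
+const client = createClient({ url: 'redis://user:password@host:6379' })
+await client.connect()
+
+await client.lPush('jobs', 'first') // list is now ["first"]
+await client.lPush('jobs', 'second') // prepended: ["second", "first"]
+
+await client.quit()
+```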
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) | +| `key` | string | Yes | The list key | +| `value` | string | Yes | The value to prepend | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The list key | +| `length` | number | Length of the list after the push | + +### `redis_rpush` + +Append a value to the end of a list stored at a key in Redis. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) | +| `key` | string | Yes | The list key | +| `value` | string | Yes | The value to append | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The list key | +| `length` | number | Length of the list after the push | + +### `redis_lpop` + +Remove and return the first element of a list stored at a key in Redis. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) | +| `key` | string | Yes | The list key | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The list key | +| `value` | string | The removed element, or null if the list is empty | + +### `redis_rpop` + +Remove and return the last element of a list stored at a key in Redis. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) | +| `key` | string | Yes | The list key | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The list key | +| `value` | string | The removed element, or null if the list is empty | + +### `redis_llen` + +Get the length of a list stored at a key in Redis. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) | +| `key` | string | Yes | The list key | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The list key | +| `length` | number | The length of the list, or 0 if the key does not exist | + +### `redis_lrange` + +Get a range of elements from a list stored at a key in Redis. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) | +| `key` | string | Yes | The list key | +| `start` | number | Yes | Start index \(0-based\) | +| `stop` | number | Yes | Stop index \(-1 for all elements\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The list key | +| `values` | array | List elements in the specified range | +| `count` | number | Number of elements returned | + +### `redis_exists` + +Check if a key exists in Redis. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `url` | string | Yes | Redis connection URL \(e.g. 
redis://user:password@host:port\) | +| `key` | string | Yes | The key to check | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The key that was checked | +| `exists` | boolean | Whether the key exists \(true\) or not \(false\) | + +### `redis_setnx` + +Set the value of a key in Redis only if the key does not already exist. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) | +| `key` | string | Yes | The key to set | +| `value` | string | Yes | The value to store | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The key that was set | +| `wasSet` | boolean | Whether the key was set \(true\) or already existed \(false\) | + + diff --git a/apps/docs/content/docs/en/tools/revenuecat.mdx b/apps/docs/content/docs/en/tools/revenuecat.mdx new file mode 100644 index 0000000000..ad4b42ee4a --- /dev/null +++ b/apps/docs/content/docs/en/tools/revenuecat.mdx @@ -0,0 +1,456 @@ +--- +title: RevenueCat +description: Manage in-app subscriptions and entitlements +--- + +import { BlockInfoCard } from "@/components/ui/block-info-card" + + + +{/* MANUAL-CONTENT-START:intro */} +[RevenueCat](https://www.revenuecat.com/) is a subscription management platform that enables you to easily set up, manage, and analyze in-app subscriptions for your apps. With RevenueCat, you can handle the complexities of in-app purchases across platforms like iOS, Android, and web—all through a single unified API. + +With RevenueCat, you can: + +- **Manage subscribers**: Track user subscriptions, entitlements, and purchases across all platforms in real time +- **Simplify implementation**: Integrate RevenueCat’s SDKs to abstract away App Store and Play Store purchase logic +- **Automate entitlement logic**: Define and manage what features users should receive when they purchase or renew +- **Analyze revenue**: Access dashboards and analytics to view churn, LTV, revenue, active subscriptions, and more +- **Grant or revoke entitlements**: Manually adjust user access (for example, for customer support or promotions) +- **Operate globally**: Support purchases, refunds, and promotions worldwide with ease + +In Sim, the RevenueCat integration allows your agents to fetch and manage subscriber data, review and update entitlements, and automate subscription-related workflows. Use RevenueCat to centralize subscription operations for your apps directly within your Sim workspace. +{/* MANUAL-CONTENT-END */} + + +## Usage Instructions + +Integrate RevenueCat into the workflow. Manage subscribers, entitlements, offerings, and Google Play subscriptions. Retrieve customer subscription status, grant or revoke promotional entitlements, record purchases, update subscriber attributes, and manage Google Play subscription billing. 
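+
+For orientation, the tools below wrap RevenueCat's v1 REST API. A minimal sketch of the request behind `revenuecat_get_customer` (the tool handles this for you; the app user ID and environment variable name are placeholders):
+
+```typescript
+// GET /v1/subscribers/{app_user_id} returns the subscriber object used throughout these tools.
+const res = await fetch('https://api.revenuecat.com/v1/subscribers/app_user_123', {
+  headers: { Authorization: `Bearer ${process.env.REVENUECAT_SECRET_KEY}` },
+})
+const { subscriber } = await res.json()
+console.log(Object.keys(subscriber.entitlements ?? {})) // entitlement identifiers
+```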
+ + + +## Tools + +### `revenuecat_get_customer` + +Retrieve subscriber information by app user ID + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | RevenueCat secret API key \(sk_...\) | +| `appUserId` | string | Yes | The app user ID of the subscriber | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `subscriber` | object | The subscriber object with subscriptions and entitlements | +| ↳ `first_seen` | string | ISO 8601 date when subscriber was first seen | +| ↳ `original_app_user_id` | string | Original app user ID | +| ↳ `original_purchase_date` | string | ISO 8601 date of original purchase | +| ↳ `management_url` | string | URL for managing the subscriber subscriptions | +| ↳ `subscriptions` | object | Map of product identifiers to subscription objects | +| ↳ `store_transaction_id` | string | Store transaction identifier | +| ↳ `original_transaction_id` | string | Original transaction identifier | +| ↳ `purchase_date` | string | ISO 8601 purchase date | +| ↳ `original_purchase_date` | string | ISO 8601 date of the original purchase | +| ↳ `expires_date` | string | ISO 8601 expiration date | +| ↳ `is_sandbox` | boolean | Whether this is a sandbox purchase | +| ↳ `unsubscribe_detected_at` | string | ISO 8601 date when unsubscribe was detected | +| ↳ `billing_issues_detected_at` | string | ISO 8601 date when billing issues were detected | +| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date | +| ↳ `ownership_type` | string | Ownership type \(purchased, family_shared\) | +| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional, prepaid\) | +| ↳ `store` | string | Store the subscription was purchased from \(app_store, play_store, stripe, etc.\) | +| ↳ `refunded_at` | string | ISO 8601 date when subscription was refunded | +| ↳ `auto_resume_date` | string | ISO 8601 date when a paused subscription will auto-resume | +| ↳ `product_plan_identifier` | string | Google Play base plan identifier \(for products set up after Feb 2023\) | +| ↳ `entitlements` | object | Map of entitlement identifiers to entitlement objects | +| ↳ `grant_date` | string | ISO 8601 grant date | +| ↳ `expires_date` | string | ISO 8601 expiration date | +| ↳ `product_identifier` | string | Product identifier | +| ↳ `is_active` | boolean | Whether the entitlement is active | +| ↳ `will_renew` | boolean | Whether the entitlement will renew | +| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional\) | +| ↳ `purchase_date` | string | ISO 8601 date of the latest purchase or renewal | +| ↳ `store` | string | Store the entitlement was granted from | +| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date | +| ↳ `non_subscriptions` | object | Map of non-subscription product identifiers to arrays of purchase objects | +| `metadata` | object | Subscriber summary metadata | +| ↳ `app_user_id` | string | The app user ID | +| ↳ `first_seen` | string | ISO 8601 date when the subscriber was first seen | +| ↳ `active_entitlements` | number | Number of active entitlements | +| ↳ `active_subscriptions` | number | Number of active subscriptions | + +### `revenuecat_delete_customer` + +Permanently delete a subscriber and all associated data + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | RevenueCat secret API key \(sk_...\) | 
+| `appUserId` | string | Yes | The app user ID of the subscriber to delete | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `deleted` | boolean | Whether the subscriber was deleted | +| `app_user_id` | string | The deleted app user ID | + +### `revenuecat_create_purchase` + +Record a purchase (receipt) for a subscriber via the REST API + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | RevenueCat API key \(public or secret\) | +| `appUserId` | string | Yes | The app user ID of the subscriber | +| `fetchToken` | string | Yes | The receipt token or purchase token from the store \(App Store receipt, Google Play purchase token, or Stripe subscription ID\) | +| `productId` | string | Yes | The product identifier for the purchase | +| `price` | number | No | The price of the product in the currency specified | +| `currency` | string | No | ISO 4217 currency code \(e.g., USD, EUR\) | +| `isRestore` | boolean | No | Whether this is a restore of a previous purchase | +| `platform` | string | No | Platform of the purchase \(ios, android, amazon, macos, stripe\). Required for Stripe and Paddle purchases. | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `subscriber` | object | The updated subscriber object after recording the purchase | +| ↳ `first_seen` | string | ISO 8601 date when subscriber was first seen | +| ↳ `original_app_user_id` | string | Original app user ID | +| ↳ `original_purchase_date` | string | ISO 8601 date of original purchase | +| ↳ `management_url` | string | URL for managing the subscriber subscriptions | +| ↳ `subscriptions` | object | Map of product identifiers to subscription objects | +| ↳ `store_transaction_id` | string | Store transaction identifier | +| ↳ `original_transaction_id` | string | Original transaction identifier | +| ↳ `purchase_date` | string | ISO 8601 purchase date | +| ↳ `original_purchase_date` | string | ISO 8601 date of the original purchase | +| ↳ `expires_date` | string | ISO 8601 expiration date | +| ↳ `is_sandbox` | boolean | Whether this is a sandbox purchase | +| ↳ `unsubscribe_detected_at` | string | ISO 8601 date when unsubscribe was detected | +| ↳ `billing_issues_detected_at` | string | ISO 8601 date when billing issues were detected | +| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date | +| ↳ `ownership_type` | string | Ownership type \(purchased, family_shared\) | +| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional, prepaid\) | +| ↳ `store` | string | Store the subscription was purchased from \(app_store, play_store, stripe, etc.\) | +| ↳ `refunded_at` | string | ISO 8601 date when subscription was refunded | +| ↳ `auto_resume_date` | string | ISO 8601 date when a paused subscription will auto-resume | +| ↳ `product_plan_identifier` | string | Google Play base plan identifier \(for products set up after Feb 2023\) | +| ↳ `entitlements` | object | Map of entitlement identifiers to entitlement objects | +| ↳ `grant_date` | string | ISO 8601 grant date | +| ↳ `expires_date` | string | ISO 8601 expiration date | +| ↳ `product_identifier` | string | Product identifier | +| ↳ `is_active` | boolean | Whether the entitlement is active | +| ↳ `will_renew` | boolean | Whether the entitlement will renew | +| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional\) | +| ↳ `purchase_date` | string | ISO 
8601 date of the latest purchase or renewal | +| ↳ `store` | string | Store the entitlement was granted from | +| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date | +| ↳ `non_subscriptions` | object | Map of non-subscription product identifiers to arrays of purchase objects | + +### `revenuecat_grant_entitlement` + +Grant a promotional entitlement to a subscriber + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | RevenueCat secret API key \(sk_...\) | +| `appUserId` | string | Yes | The app user ID of the subscriber | +| `entitlementIdentifier` | string | Yes | The entitlement identifier to grant | +| `duration` | string | Yes | Duration of the entitlement \(daily, three_day, weekly, monthly, two_month, three_month, six_month, yearly, lifetime\) | +| `startTimeMs` | number | No | Optional start time in milliseconds since Unix epoch. Set to a past time to achieve custom durations shorter than daily. | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `subscriber` | object | The updated subscriber object after granting the entitlement | +| ↳ `first_seen` | string | ISO 8601 date when subscriber was first seen | +| ↳ `original_app_user_id` | string | Original app user ID | +| ↳ `original_purchase_date` | string | ISO 8601 date of original purchase | +| ↳ `management_url` | string | URL for managing the subscriber subscriptions | +| ↳ `subscriptions` | object | Map of product identifiers to subscription objects | +| ↳ `store_transaction_id` | string | Store transaction identifier | +| ↳ `original_transaction_id` | string | Original transaction identifier | +| ↳ `purchase_date` | string | ISO 8601 purchase date | +| ↳ `original_purchase_date` | string | ISO 8601 date of the original purchase | +| ↳ `expires_date` | string | ISO 8601 expiration date | +| ↳ `is_sandbox` | boolean | Whether this is a sandbox purchase | +| ↳ `unsubscribe_detected_at` | string | ISO 8601 date when unsubscribe was detected | +| ↳ `billing_issues_detected_at` | string | ISO 8601 date when billing issues were detected | +| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date | +| ↳ `ownership_type` | string | Ownership type \(purchased, family_shared\) | +| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional, prepaid\) | +| ↳ `store` | string | Store the subscription was purchased from \(app_store, play_store, stripe, etc.\) | +| ↳ `refunded_at` | string | ISO 8601 date when subscription was refunded | +| ↳ `auto_resume_date` | string | ISO 8601 date when a paused subscription will auto-resume | +| ↳ `product_plan_identifier` | string | Google Play base plan identifier \(for products set up after Feb 2023\) | +| ↳ `entitlements` | object | Map of entitlement identifiers to entitlement objects | +| ↳ `grant_date` | string | ISO 8601 grant date | +| ↳ `expires_date` | string | ISO 8601 expiration date | +| ↳ `product_identifier` | string | Product identifier | +| ↳ `is_active` | boolean | Whether the entitlement is active | +| ↳ `will_renew` | boolean | Whether the entitlement will renew | +| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional\) | +| ↳ `purchase_date` | string | ISO 8601 date of the latest purchase or renewal | +| ↳ `store` | string | Store the entitlement was granted from | +| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date | +| ↳ 
`non_subscriptions` | object | Map of non-subscription product identifiers to arrays of purchase objects | + +### `revenuecat_revoke_entitlement` + +Revoke all promotional entitlements for a specific entitlement identifier + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | RevenueCat secret API key \(sk_...\) | +| `appUserId` | string | Yes | The app user ID of the subscriber | +| `entitlementIdentifier` | string | Yes | The entitlement identifier to revoke | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `subscriber` | object | The updated subscriber object after revoking the entitlement | +| ↳ `first_seen` | string | ISO 8601 date when subscriber was first seen | +| ↳ `original_app_user_id` | string | Original app user ID | +| ↳ `original_purchase_date` | string | ISO 8601 date of original purchase | +| ↳ `management_url` | string | URL for managing the subscriber subscriptions | +| ↳ `subscriptions` | object | Map of product identifiers to subscription objects | +| ↳ `store_transaction_id` | string | Store transaction identifier | +| ↳ `original_transaction_id` | string | Original transaction identifier | +| ↳ `purchase_date` | string | ISO 8601 purchase date | +| ↳ `original_purchase_date` | string | ISO 8601 date of the original purchase | +| ↳ `expires_date` | string | ISO 8601 expiration date | +| ↳ `is_sandbox` | boolean | Whether this is a sandbox purchase | +| ↳ `unsubscribe_detected_at` | string | ISO 8601 date when unsubscribe was detected | +| ↳ `billing_issues_detected_at` | string | ISO 8601 date when billing issues were detected | +| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date | +| ↳ `ownership_type` | string | Ownership type \(purchased, family_shared\) | +| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional, prepaid\) | +| ↳ `store` | string | Store the subscription was purchased from \(app_store, play_store, stripe, etc.\) | +| ↳ `refunded_at` | string | ISO 8601 date when subscription was refunded | +| ↳ `auto_resume_date` | string | ISO 8601 date when a paused subscription will auto-resume | +| ↳ `product_plan_identifier` | string | Google Play base plan identifier \(for products set up after Feb 2023\) | +| ↳ `entitlements` | object | Map of entitlement identifiers to entitlement objects | +| ↳ `grant_date` | string | ISO 8601 grant date | +| ↳ `expires_date` | string | ISO 8601 expiration date | +| ↳ `product_identifier` | string | Product identifier | +| ↳ `is_active` | boolean | Whether the entitlement is active | +| ↳ `will_renew` | boolean | Whether the entitlement will renew | +| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional\) | +| ↳ `purchase_date` | string | ISO 8601 date of the latest purchase or renewal | +| ↳ `store` | string | Store the entitlement was granted from | +| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date | +| ↳ `non_subscriptions` | object | Map of non-subscription product identifiers to arrays of purchase objects | + +### `revenuecat_list_offerings` + +List all offerings configured for the project + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | RevenueCat API key | +| `appUserId` | string | Yes | An app user ID to retrieve offerings for | +| `platform` | string | No | Platform to filter offerings \(ios, 
android, stripe, etc.\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `current_offering_id` | string | The identifier of the current offering | +| `offerings` | array | List of offerings | +| ↳ `identifier` | string | Offering identifier | +| ↳ `description` | string | Offering description | +| ↳ `packages` | array | List of packages in the offering | +| ↳ `identifier` | string | Package identifier | +| ↳ `platform_product_identifier` | string | Platform-specific product identifier | +| `metadata` | object | Offerings metadata | +| ↳ `count` | number | Number of offerings returned | +| ↳ `current_offering_id` | string | Current offering identifier | + +### `revenuecat_update_subscriber_attributes` + +Update custom subscriber attributes (e.g., $email, $displayName, or custom key-value pairs) + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | RevenueCat secret API key \(sk_...\) | +| `appUserId` | string | Yes | The app user ID of the subscriber | +| `attributes` | json | Yes | JSON object of attributes to set. Each key maps to an object with a "value" field. Example: \{"$email": \{"value": "user@example.com"\}, "$displayName": \{"value": "John"\}\} | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `updated` | boolean | Whether the subscriber attributes were successfully updated | +| `app_user_id` | string | The app user ID of the updated subscriber | + +### `revenuecat_defer_google_subscription` + +Defer a Google Play subscription by extending its billing date by a number of days (Google Play only) + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | RevenueCat secret API key \(sk_...\) | +| `appUserId` | string | Yes | The app user ID of the subscriber | +| `productId` | string | Yes | The Google Play product identifier of the subscription to defer \(use the part before the colon for products set up after Feb 2023\) | +| `extendByDays` | number | Yes | Number of days to extend the subscription by \(1-365\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `subscriber` | object | The updated subscriber object after deferring the Google subscription | +| ↳ `first_seen` | string | ISO 8601 date when subscriber was first seen | +| ↳ `original_app_user_id` | string | Original app user ID | +| ↳ `original_purchase_date` | string | ISO 8601 date of original purchase | +| ↳ `management_url` | string | URL for managing the subscriber subscriptions | +| ↳ `subscriptions` | object | Map of product identifiers to subscription objects | +| ↳ `store_transaction_id` | string | Store transaction identifier | +| ↳ `original_transaction_id` | string | Original transaction identifier | +| ↳ `purchase_date` | string | ISO 8601 purchase date | +| ↳ `original_purchase_date` | string | ISO 8601 date of the original purchase | +| ↳ `expires_date` | string | ISO 8601 expiration date | +| ↳ `is_sandbox` | boolean | Whether this is a sandbox purchase | +| ↳ `unsubscribe_detected_at` | string | ISO 8601 date when unsubscribe was detected | +| ↳ `billing_issues_detected_at` | string | ISO 8601 date when billing issues were detected | +| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date | +| ↳ `ownership_type` | string | Ownership type \(purchased, family_shared\) | +| ↳ `period_type` | 
string | Period type \(normal, trial, intro, promotional, prepaid\) | +| ↳ `store` | string | Store the subscription was purchased from \(app_store, play_store, stripe, etc.\) | +| ↳ `refunded_at` | string | ISO 8601 date when subscription was refunded | +| ↳ `auto_resume_date` | string | ISO 8601 date when a paused subscription will auto-resume | +| ↳ `product_plan_identifier` | string | Google Play base plan identifier \(for products set up after Feb 2023\) | +| ↳ `entitlements` | object | Map of entitlement identifiers to entitlement objects | +| ↳ `grant_date` | string | ISO 8601 grant date | +| ↳ `expires_date` | string | ISO 8601 expiration date | +| ↳ `product_identifier` | string | Product identifier | +| ↳ `is_active` | boolean | Whether the entitlement is active | +| ↳ `will_renew` | boolean | Whether the entitlement will renew | +| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional\) | +| ↳ `purchase_date` | string | ISO 8601 date of the latest purchase or renewal | +| ↳ `store` | string | Store the entitlement was granted from | +| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date | +| ↳ `non_subscriptions` | object | Map of non-subscription product identifiers to arrays of purchase objects | + +### `revenuecat_refund_google_subscription` + +Refund and optionally revoke a Google Play subscription (Google Play only) + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | RevenueCat secret API key \(sk_...\) | +| `appUserId` | string | Yes | The app user ID of the subscriber | +| `productId` | string | Yes | The Google Play product identifier of the subscription to refund | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `subscriber` | object | The updated subscriber object after refunding the Google subscription | +| ↳ `first_seen` | string | ISO 8601 date when subscriber was first seen | +| ↳ `original_app_user_id` | string | Original app user ID | +| ↳ `original_purchase_date` | string | ISO 8601 date of original purchase | +| ↳ `management_url` | string | URL for managing the subscriber subscriptions | +| ↳ `subscriptions` | object | Map of product identifiers to subscription objects | +| ↳ `store_transaction_id` | string | Store transaction identifier | +| ↳ `original_transaction_id` | string | Original transaction identifier | +| ↳ `purchase_date` | string | ISO 8601 purchase date | +| ↳ `original_purchase_date` | string | ISO 8601 date of the original purchase | +| ↳ `expires_date` | string | ISO 8601 expiration date | +| ↳ `is_sandbox` | boolean | Whether this is a sandbox purchase | +| ↳ `unsubscribe_detected_at` | string | ISO 8601 date when unsubscribe was detected | +| ↳ `billing_issues_detected_at` | string | ISO 8601 date when billing issues were detected | +| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date | +| ↳ `ownership_type` | string | Ownership type \(purchased, family_shared\) | +| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional, prepaid\) | +| ↳ `store` | string | Store the subscription was purchased from \(app_store, play_store, stripe, etc.\) | +| ↳ `refunded_at` | string | ISO 8601 date when subscription was refunded | +| ↳ `auto_resume_date` | string | ISO 8601 date when a paused subscription will auto-resume | +| ↳ `product_plan_identifier` | string | Google Play base plan identifier \(for products set up after Feb 
2023\) | +| ↳ `entitlements` | object | Map of entitlement identifiers to entitlement objects | +| ↳ `grant_date` | string | ISO 8601 grant date | +| ↳ `expires_date` | string | ISO 8601 expiration date | +| ↳ `product_identifier` | string | Product identifier | +| ↳ `is_active` | boolean | Whether the entitlement is active | +| ↳ `will_renew` | boolean | Whether the entitlement will renew | +| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional\) | +| ↳ `purchase_date` | string | ISO 8601 date of the latest purchase or renewal | +| ↳ `store` | string | Store the entitlement was granted from | +| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date | +| ↳ `non_subscriptions` | object | Map of non-subscription product identifiers to arrays of purchase objects | + +### `revenuecat_revoke_google_subscription` + +Immediately revoke access to a Google Play subscription and issue a refund (Google Play only) + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `apiKey` | string | Yes | RevenueCat secret API key \(sk_...\) | +| `appUserId` | string | Yes | The app user ID of the subscriber | +| `productId` | string | Yes | The Google Play product identifier of the subscription to revoke | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `subscriber` | object | The updated subscriber object after revoking the Google subscription | +| ↳ `first_seen` | string | ISO 8601 date when subscriber was first seen | +| ↳ `original_app_user_id` | string | Original app user ID | +| ↳ `original_purchase_date` | string | ISO 8601 date of original purchase | +| ↳ `management_url` | string | URL for managing the subscriber subscriptions | +| ↳ `subscriptions` | object | Map of product identifiers to subscription objects | +| ↳ `store_transaction_id` | string | Store transaction identifier | +| ↳ `original_transaction_id` | string | Original transaction identifier | +| ↳ `purchase_date` | string | ISO 8601 purchase date | +| ↳ `original_purchase_date` | string | ISO 8601 date of the original purchase | +| ↳ `expires_date` | string | ISO 8601 expiration date | +| ↳ `is_sandbox` | boolean | Whether this is a sandbox purchase | +| ↳ `unsubscribe_detected_at` | string | ISO 8601 date when unsubscribe was detected | +| ↳ `billing_issues_detected_at` | string | ISO 8601 date when billing issues were detected | +| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date | +| ↳ `ownership_type` | string | Ownership type \(purchased, family_shared\) | +| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional, prepaid\) | +| ↳ `store` | string | Store the subscription was purchased from \(app_store, play_store, stripe, etc.\) | +| ↳ `refunded_at` | string | ISO 8601 date when subscription was refunded | +| ↳ `auto_resume_date` | string | ISO 8601 date when a paused subscription will auto-resume | +| ↳ `product_plan_identifier` | string | Google Play base plan identifier \(for products set up after Feb 2023\) | +| ↳ `entitlements` | object | Map of entitlement identifiers to entitlement objects | +| ↳ `grant_date` | string | ISO 8601 grant date | +| ↳ `expires_date` | string | ISO 8601 expiration date | +| ↳ `product_identifier` | string | Product identifier | +| ↳ `is_active` | boolean | Whether the entitlement is active | +| ↳ `will_renew` | boolean | Whether the entitlement will renew | +| ↳ `period_type` | string | Period type 
\(normal, trial, intro, promotional\) | +| ↳ `purchase_date` | string | ISO 8601 date of the latest purchase or renewal | +| ↳ `store` | string | Store the entitlement was granted from | +| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date | +| ↳ `non_subscriptions` | object | Map of non-subscription product identifiers to arrays of purchase objects | + + diff --git a/apps/docs/content/docs/en/tools/table.mdx b/apps/docs/content/docs/en/tools/table.mdx new file mode 100644 index 0000000000..acde8e300c --- /dev/null +++ b/apps/docs/content/docs/en/tools/table.mdx @@ -0,0 +1,351 @@ +--- +title: Table +description: User-defined data tables for storing and querying structured data +--- + +import { BlockInfoCard } from "@/components/ui/block-info-card" + + + +Tables allow you to create and manage custom data tables directly within Sim. Store, query, and manipulate structured data within your workflows without needing external database integrations. + +**Why Use Tables?** +- **No external setup**: Create tables instantly without configuring external databases +- **Workflow-native**: Data persists across workflow executions and is accessible from any workflow in your workspace +- **Flexible schema**: Define columns with types (string, number, boolean, date, json) and constraints (required, unique) +- **Powerful querying**: Filter, sort, and paginate data using MongoDB-style operators +- **Agent-friendly**: Tables can be used as tools by AI agents for dynamic data storage and retrieval + +**Key Features:** +- Create tables with custom schemas +- Insert, update, upsert, and delete rows +- Query with filters and sorting +- Batch operations for bulk inserts +- Bulk updates and deletes by filter +- Up to 10,000 rows per table, 100 tables per workspace + +## Creating Tables + +Tables are created from the **Tables** section in the sidebar. Each table requires: +- **Name**: Alphanumeric with underscores (e.g., `customer_leads`) +- **Description**: Optional description of the table's purpose +- **Schema**: Define columns with name, type, and optional constraints + +### Column Types + +| Type | Description | Example Values | +|------|-------------|----------------| +| `string` | Text data | `"John Doe"`, `"active"` | +| `number` | Numeric data | `42`, `99.99` | +| `boolean` | True/false values | `true`, `false` | +| `date` | Date/time values | `"2024-01-15T10:30:00Z"` | +| `json` | Complex nested data | `{"address": {"city": "NYC"}}` | + +### Column Constraints + +- **Required**: Column must have a value (cannot be null) +- **Unique**: Values must be unique across all rows (enables upsert matching) + +## Usage Instructions + +Create and manage custom data tables. Store, query, and manipulate structured data within workflows. 
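+
+As a sketch of what a schema might look like for the `customer_leads` example above (this shape is illustrative only; the exact create-table payload is an assumption, not taken from this document):
+
+```typescript
+// Hypothetical table definition. Field names here are assumptions made for
+// illustration; real tables are defined through the Tables section in the sidebar.
+const customerLeads = {
+  name: 'customer_leads',
+  description: 'Inbound leads captured from the contact form',
+  columns: [
+    // "unique" enables upsert matching (see table_upsert_row below)
+    { name: 'email', type: 'string', required: true, unique: true },
+    { name: 'score', type: 'number' },
+    { name: 'active', type: 'boolean' },
+    { name: 'signed_up_at', type: 'date' },
+    { name: 'profile', type: 'json' },
+  ],
+}
+```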
+ +## Tools + +### `table_query_rows` + +Query rows from a table with filtering, sorting, and pagination + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `tableId` | string | Yes | Table ID | +| `filter` | object | No | Filter conditions using MongoDB-style operators | +| `sort` | object | No | Sort order as \{column: "asc"\|"desc"\} | +| `limit` | number | No | Maximum rows to return \(default: 100, max: 1000\) | +| `offset` | number | No | Number of rows to skip \(default: 0\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `success` | boolean | Whether query succeeded | +| `rows` | array | Query result rows | +| `rowCount` | number | Number of rows returned | +| `totalCount` | number | Total rows matching filter | +| `limit` | number | Limit used in query | +| `offset` | number | Offset used in query | + +### `table_insert_row` + +Insert a new row into a table + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `tableId` | string | Yes | Table ID | +| `data` | object | Yes | Row data as JSON object matching the table schema | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `success` | boolean | Whether row was inserted | +| `row` | object | Inserted row data including generated ID | +| `message` | string | Status message | + +### `table_upsert_row` + +Insert or update a row based on unique column constraints. If a row with matching unique field exists, update it; otherwise insert a new row. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `tableId` | string | Yes | Table ID | +| `data` | object | Yes | Row data to insert or update | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `success` | boolean | Whether row was upserted | +| `row` | object | Upserted row data | +| `operation` | string | Operation performed: "insert" or "update" | +| `message` | string | Status message | + +### `table_batch_insert_rows` + +Insert multiple rows at once (up to 1000 rows per batch) + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `tableId` | string | Yes | Table ID | +| `rows` | array | Yes | Array of row data objects to insert | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `success` | boolean | Whether batch insert succeeded | +| `rows` | array | Array of inserted rows with IDs | +| `insertedCount` | number | Number of rows inserted | +| `message` | string | Status message | + +### `table_update_row` + +Update a specific row by its ID + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `tableId` | string | Yes | Table ID | +| `rowId` | string | Yes | Row ID to update | +| `data` | object | Yes | Data to update \(partial update supported\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `success` | boolean | Whether row was updated | +| `row` | object | Updated row data | +| `message` | string | Status message | + +### `table_update_rows_by_filter` + +Update multiple rows matching a filter condition + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `tableId` | string | Yes | Table ID | +| `filter` | object | 
Yes | Filter to match rows for update | +| `data` | object | Yes | Data to apply to matching rows | +| `limit` | number | No | Maximum rows to update \(default: 1000\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `success` | boolean | Whether update succeeded | +| `updatedCount` | number | Number of rows updated | +| `updatedRowIds` | array | IDs of updated rows | +| `message` | string | Status message | + +### `table_delete_row` + +Delete a specific row by its ID + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `tableId` | string | Yes | Table ID | +| `rowId` | string | Yes | Row ID to delete | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `success` | boolean | Whether row was deleted | +| `deletedCount` | number | Number of rows deleted \(1 or 0\) | +| `message` | string | Status message | + +### `table_delete_rows_by_filter` + +Delete multiple rows matching a filter condition + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `tableId` | string | Yes | Table ID | +| `filter` | object | Yes | Filter to match rows for deletion | +| `limit` | number | No | Maximum rows to delete \(default: 1000\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `success` | boolean | Whether delete succeeded | +| `deletedCount` | number | Number of rows deleted | +| `deletedRowIds` | array | IDs of deleted rows | +| `message` | string | Status message | + +### `table_get_row` + +Get a single row by its ID + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `tableId` | string | Yes | Table ID | +| `rowId` | string | Yes | Row ID to retrieve | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `success` | boolean | Whether row was found | +| `row` | object | Row data | +| `message` | string | Status message | + +### `table_get_schema` + +Get the schema definition for a table + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `tableId` | string | Yes | Table ID | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `success` | boolean | Whether schema was retrieved | +| `name` | string | Table name | +| `columns` | array | Array of column definitions | +| `message` | string | Status message | + +## Filter Operators + +Filters use MongoDB-style operators for flexible querying: + +| Operator | Description | Example | +|----------|-------------|---------| +| `$eq` | Equals | `{"status": {"$eq": "active"}}` or `{"status": "active"}` | +| `$ne` | Not equals | `{"status": {"$ne": "deleted"}}` | +| `$gt` | Greater than | `{"age": {"$gt": 18}}` | +| `$gte` | Greater than or equal | `{"score": {"$gte": 80}}` | +| `$lt` | Less than | `{"price": {"$lt": 100}}` | +| `$lte` | Less than or equal | `{"quantity": {"$lte": 10}}` | +| `$in` | In array | `{"status": {"$in": ["active", "pending"]}}` | +| `$nin` | Not in array | `{"type": {"$nin": ["spam", "blocked"]}}` | +| `$contains` | String contains | `{"email": {"$contains": "@gmail.com"}}` | + +### Combining Filters + +Multiple field conditions are combined with AND logic: + +```json +{ + "status": "active", + "age": {"$gte": 18} +} +``` + +Use `$or` for OR logic: + +```json +{ + "$or": [ + {"status": "active"}, + 
{"status": "pending"} + ] +} +``` + +## Sort Specification + +Specify sort order with column names and direction: + +```json +{ + "createdAt": "desc" +} +``` + +Multi-column sorting: + +```json +{ + "priority": "desc", + "name": "asc" +} +``` + +## Built-in Columns + +Every row automatically includes: + +| Column | Type | Description | +|--------|------|-------------| +| `id` | string | Unique row identifier | +| `createdAt` | date | When the row was created | +| `updatedAt` | date | When the row was last modified | + +These can be used in filters and sorting. + +## Limits + +| Resource | Limit | +|----------|-------| +| Tables per workspace | 100 | +| Rows per table | 10,000 | +| Columns per table | 50 | +| Max row size | 100KB | +| String value length | 10,000 characters | +| Query limit | 1,000 rows | +| Batch insert size | 1,000 rows | +| Bulk update/delete | 1,000 rows | + +## Notes + +- Category: `blocks` +- Type: `table` +- Tables are scoped to workspaces and accessible from any workflow within that workspace +- Data persists across workflow executions +- Use unique constraints to enable upsert functionality +- The visual filter/sort builder provides an easy way to construct queries without writing JSON diff --git a/apps/docs/content/docs/en/tools/upstash.mdx b/apps/docs/content/docs/en/tools/upstash.mdx new file mode 100644 index 0000000000..b67affe492 --- /dev/null +++ b/apps/docs/content/docs/en/tools/upstash.mdx @@ -0,0 +1,357 @@ +--- +title: Upstash +description: Serverless Redis with Upstash +--- + +import { BlockInfoCard } from "@/components/ui/block-info-card" + + + +{/* MANUAL-CONTENT-START:intro */} +[Upstash](https://upstash.com/) is a serverless data platform designed for modern applications that need fast, simple, and scalable data storage with minimal setup. Upstash specializes in providing Redis and Kafka as fully managed, pay-per-request cloud services, making it a popular choice for developers building serverless, edge, and event-driven architectures. + +With Upstash Redis, you can: + +- **Store and retrieve data instantly**: Read and write key-value pairs, hashes, lists, sets, and more—all over a high-performance REST API. +- **Scale serverlessly**: No infrastructure to manage. Upstash automatically scales with your app and charges only for what you use. +- **Access globally**: Deploy near your users with multi-region support and global distribution. +- **Integrate easily**: Use Upstash’s REST API in serverless functions, edge workers, Next.js, Vercel, Cloudflare Workers, and more. +- **Automate with scripts**: Run Lua scripts for advanced transactions and automation. +- **Ensure security**: Protect your data with built-in authentication and TLS encryption. + +In Sim, the Upstash integration empowers your agents and workflows to read, write, and manage data in Upstash Redis using simple, unified commands—perfect for building scalable automations, caching results, managing queues, and more, all without dealing with server management. +{/* MANUAL-CONTENT-END */} + + +## Usage Instructions + +Connect to Upstash Redis to perform key-value, hash, list, and utility operations via the REST API. + + + +## Tools + +### `upstash_redis_get` + +Get the value of a key from Upstash Redis. 
+
+#### Input
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `restUrl` | string | Yes | Upstash Redis REST URL |
+| `restToken` | string | Yes | Upstash Redis REST Token |
+| `key` | string | Yes | The key to retrieve |
+
+#### Output
+
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `key` | string | The key that was retrieved |
+| `value` | json | The value of the key \(string\), or null if not found |
+
+### `upstash_redis_set`
+
+Set the value of a key in Upstash Redis with an optional expiration time in seconds.
+
+#### Input
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `restUrl` | string | Yes | Upstash Redis REST URL |
+| `restToken` | string | Yes | Upstash Redis REST Token |
+| `key` | string | Yes | The key to set |
+| `value` | string | Yes | The value to store |
+| `ex` | number | No | Expiration time in seconds \(optional\) |
+
+#### Output
+
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `key` | string | The key that was set |
+| `result` | string | The result of the SET operation \(typically "OK"\) |
+
+### `upstash_redis_delete`
+
+Delete a key from Upstash Redis.
+
+#### Input
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `restUrl` | string | Yes | Upstash Redis REST URL |
+| `restToken` | string | Yes | Upstash Redis REST Token |
+| `key` | string | Yes | The key to delete |
+
+#### Output
+
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `key` | string | The key that was deleted |
+| `deletedCount` | number | Number of keys deleted \(0 if key did not exist, 1 if deleted\) |
+
+### `upstash_redis_keys`
+
+List keys matching a pattern in Upstash Redis. Defaults to listing all keys (*).
+
+#### Input
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `restUrl` | string | Yes | Upstash Redis REST URL |
+| `restToken` | string | Yes | Upstash Redis REST Token |
+| `pattern` | string | No | Pattern to match keys \(e.g., "user:*"\). Defaults to "*" for all keys. |
+
+#### Output
+
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `pattern` | string | The pattern used to match keys |
+| `keys` | array | List of keys matching the pattern |
+| `count` | number | Number of keys found |
+
+### `upstash_redis_command`
+
+Execute an arbitrary Redis command against Upstash Redis. Pass the full command as a JSON array (e.g., ["HSET", "myhash", "field1", "value1"]) or a simple command string (e.g., "PING").
+
+#### Input
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `restUrl` | string | Yes | Upstash Redis REST URL |
+| `restToken` | string | Yes | Upstash Redis REST Token |
+| `command` | string | Yes | Redis command as a JSON array \(e.g., \["HSET", "myhash", "field1", "value1"\]\) or a simple command string \(e.g., "PING"\) |
+
+#### Output
+
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `command` | string | The command that was executed |
+| `result` | json | The result of the Redis command |
+
+### `upstash_redis_hset`
+
+Set a field in a hash stored at a key in Upstash Redis.
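+
+For write commands like HSET, the REST API also accepts the full command as a JSON array in a POST body, the same shape `upstash_redis_command` above takes. A minimal sketch with placeholder credentials (the request shape the integration uses is an assumption):
+
+```typescript
+// Sketch: sending HSET as a JSON-array command to the Upstash REST API.
+const restUrl = 'https://your-db.upstash.io' // placeholder
+const restToken = process.env.UPSTASH_REDIS_REST_TOKEN ?? ''
+
+const res = await fetch(restUrl, {
+  method: 'POST',
+  headers: { Authorization: `Bearer ${restToken}` },
+  body: JSON.stringify(['HSET', 'user:123', 'name', 'Ada']),
+})
+const { result } = (await res.json()) as { result: number }
+// result is 1 if the field was newly created, 0 if an existing field was updated
+```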
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `restUrl` | string | Yes | Upstash Redis REST URL | +| `restToken` | string | Yes | Upstash Redis REST Token | +| `key` | string | Yes | The hash key | +| `field` | string | Yes | The field name within the hash | +| `value` | string | Yes | The value to store in the hash field | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The hash key | +| `field` | string | The field that was set | +| `result` | number | Number of new fields added \(0 if field was updated, 1 if new\) | + +### `upstash_redis_hget` + +Get the value of a field in a hash stored at a key in Upstash Redis. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `restUrl` | string | Yes | Upstash Redis REST URL | +| `restToken` | string | Yes | Upstash Redis REST Token | +| `key` | string | Yes | The hash key | +| `field` | string | Yes | The field name to retrieve | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The hash key | +| `field` | string | The field that was retrieved | +| `value` | json | The value of the hash field \(string\), or null if not found | + +### `upstash_redis_hgetall` + +Get all fields and values of a hash stored at a key in Upstash Redis. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `restUrl` | string | Yes | Upstash Redis REST URL | +| `restToken` | string | Yes | Upstash Redis REST Token | +| `key` | string | Yes | The hash key | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The hash key | +| `fields` | object | All field-value pairs in the hash, keyed by field name | +| `fieldCount` | number | Number of fields in the hash | + +### `upstash_redis_incr` + +Atomically increment the integer value of a key by one in Upstash Redis. If the key does not exist, it is set to 0 before incrementing. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `restUrl` | string | Yes | Upstash Redis REST URL | +| `restToken` | string | Yes | Upstash Redis REST Token | +| `key` | string | Yes | The key to increment | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The key that was incremented | +| `value` | number | The new value after incrementing | + +### `upstash_redis_expire` + +Set a timeout on a key in Upstash Redis. After the timeout, the key is deleted. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `restUrl` | string | Yes | Upstash Redis REST URL | +| `restToken` | string | Yes | Upstash Redis REST Token | +| `key` | string | Yes | The key to set expiration on | +| `seconds` | number | Yes | Timeout in seconds | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The key that expiration was set on | +| `result` | number | 1 if the timeout was set, 0 if the key does not exist | + +### `upstash_redis_ttl` + +Get the remaining time to live of a key in Upstash Redis. Returns -1 if the key has no expiration, -2 if the key does not exist. 
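+
+These sentinel values follow standard Redis TTL semantics; a small helper makes the three cases explicit:
+
+```typescript
+// Interpreting the ttl output of this tool (standard Redis semantics).
+function describeTtl(ttl: number): string {
+  if (ttl === -2) return 'key does not exist'
+  if (ttl === -1) return 'key exists but never expires'
+  return `key expires in ${ttl} seconds`
+}
+```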
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `restUrl` | string | Yes | Upstash Redis REST URL | +| `restToken` | string | Yes | Upstash Redis REST Token | +| `key` | string | Yes | The key to check TTL for | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The key checked | +| `ttl` | number | Remaining TTL in seconds. Positive integer if the key has a TTL set, -1 if the key exists with no expiration, -2 if the key does not exist. | + +### `upstash_redis_lpush` + +Prepend a value to the beginning of a list in Upstash Redis. Creates the list if it does not exist. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `restUrl` | string | Yes | Upstash Redis REST URL | +| `restToken` | string | Yes | Upstash Redis REST Token | +| `key` | string | Yes | The list key | +| `value` | string | Yes | The value to prepend to the list | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The list key | +| `length` | number | The length of the list after the push | + +### `upstash_redis_lrange` + +Get a range of elements from a list in Upstash Redis. Use 0 and -1 for start and stop to get all elements. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `restUrl` | string | Yes | Upstash Redis REST URL | +| `restToken` | string | Yes | Upstash Redis REST Token | +| `key` | string | Yes | The list key | +| `start` | number | Yes | Start index \(0-based, negative values count from end\) | +| `stop` | number | Yes | Stop index \(inclusive, -1 for last element\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The list key | +| `values` | array | List of elements in the specified range | +| `count` | number | Number of elements returned | + +### `upstash_redis_exists` + +Check if a key exists in Upstash Redis. Returns true if the key exists, false otherwise. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `restUrl` | string | Yes | Upstash Redis REST URL | +| `restToken` | string | Yes | Upstash Redis REST Token | +| `key` | string | Yes | The key to check | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The key that was checked | +| `exists` | boolean | Whether the key exists \(true\) or not \(false\) | + +### `upstash_redis_setnx` + +Set the value of a key only if it does not already exist. Returns true if the key was set, false if it already existed. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `restUrl` | string | Yes | Upstash Redis REST URL | +| `restToken` | string | Yes | Upstash Redis REST Token | +| `key` | string | Yes | The key to set | +| `value` | string | Yes | The value to store if the key does not exist | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The key that was attempted to set | +| `wasSet` | boolean | Whether the key was set \(true\) or already existed \(false\) | + +### `upstash_redis_incrby` + +Increment the integer value of a key by a given amount. Use a negative value to decrement. If the key does not exist, it is set to 0 before the operation. 
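+
+Because the increment is atomic, this tool pairs naturally with `upstash_redis_expire` for counters such as rate limits. A sketch of the pattern over the REST API (placeholder credentials; the helper is illustrative, not part of the integration):
+
+```typescript
+// Sketch: a naive fixed-window rate limiter built from INCRBY + EXPIRE.
+const restUrl = 'https://your-db.upstash.io' // placeholder
+const restToken = process.env.UPSTASH_REDIS_REST_TOKEN ?? ''
+
+async function redis(command: (string | number)[]): Promise<unknown> {
+  const res = await fetch(restUrl, {
+    method: 'POST',
+    headers: { Authorization: `Bearer ${restToken}` },
+    body: JSON.stringify(command),
+  })
+  const { result } = (await res.json()) as { result: unknown }
+  return result
+}
+
+const bucket = `ratelimit:user-123:${Math.floor(Date.now() / 60_000)}` // per-minute window
+const count = (await redis(['INCRBY', bucket, 1])) as number
+if (count === 1) await redis(['EXPIRE', bucket, 60]) // first hit starts the window
+const allowed = count <= 100 // assumed quota of 100 requests per minute
+```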
+ +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `restUrl` | string | Yes | Upstash Redis REST URL | +| `restToken` | string | Yes | Upstash Redis REST Token | +| `key` | string | Yes | The key to increment | +| `increment` | number | Yes | Amount to increment by \(use negative value to decrement\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `key` | string | The key that was incremented | +| `value` | number | The new value after incrementing | + + diff --git a/apps/sim/app/api/auth/oauth/token/route.test.ts b/apps/sim/app/api/auth/oauth/token/route.test.ts index 325f4d6c2c..3ed0c576ad 100644 --- a/apps/sim/app/api/auth/oauth/token/route.test.ts +++ b/apps/sim/app/api/auth/oauth/token/route.test.ts @@ -3,7 +3,7 @@ * * @vitest-environment node */ -import { createMockLogger, createMockRequest } from '@sim/testing' +import { createMockLogger, createMockRequest, mockHybridAuth } from '@sim/testing' import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' describe('OAuth Token API Routes', () => { @@ -12,7 +12,7 @@ describe('OAuth Token API Routes', () => { const mockRefreshTokenIfNeeded = vi.fn() const mockGetOAuthToken = vi.fn() const mockAuthorizeCredentialUse = vi.fn() - const mockCheckSessionOrInternalAuth = vi.fn() + let mockCheckSessionOrInternalAuth: ReturnType const mockLogger = createMockLogger() @@ -41,9 +41,7 @@ describe('OAuth Token API Routes', () => { authorizeCredentialUse: mockAuthorizeCredentialUse, })) - vi.doMock('@/lib/auth/hybrid', () => ({ - checkSessionOrInternalAuth: mockCheckSessionOrInternalAuth, - })) + ;({ mockCheckSessionOrInternalAuth } = mockHybridAuth()) }) afterEach(() => { @@ -73,23 +71,18 @@ describe('OAuth Token API Routes', () => { refreshed: false, }) - // Create mock request const req = createMockRequest('POST', { credentialId: 'credential-id', }) - // Import handler after setting up mocks const { POST } = await import('@/app/api/auth/oauth/token/route') - // Call handler const response = await POST(req) const data = await response.json() - // Verify request was handled correctly expect(response.status).toBe(200) expect(data).toHaveProperty('accessToken', 'fresh-token') - // Verify mocks were called correctly expect(mockAuthorizeCredentialUse).toHaveBeenCalled() expect(mockGetCredential).toHaveBeenCalled() expect(mockRefreshTokenIfNeeded).toHaveBeenCalled() diff --git a/apps/sim/app/api/chat/[identifier]/route.test.ts b/apps/sim/app/api/chat/[identifier]/route.test.ts index 5a753fd4d9..d3a14c5ac3 100644 --- a/apps/sim/app/api/chat/[identifier]/route.test.ts +++ b/apps/sim/app/api/chat/[identifier]/route.test.ts @@ -3,7 +3,7 @@ * * @vitest-environment node */ -import { loggerMock } from '@sim/testing' +import { loggerMock, requestUtilsMock } from '@sim/testing' import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' /** @@ -94,9 +94,7 @@ vi.mock('@/lib/core/utils/sse', () => ({ }, })) -vi.mock('@/lib/core/utils/request', () => ({ - generateRequestId: vi.fn().mockReturnValue('test-request-id'), -})) +vi.mock('@/lib/core/utils/request', () => requestUtilsMock) vi.mock('@/lib/core/security/encryption', () => ({ decryptSecret: vi.fn().mockResolvedValue({ decrypted: 'test-password' }), diff --git a/apps/sim/app/api/chat/utils.test.ts b/apps/sim/app/api/chat/utils.test.ts index 84c3bb375a..a6b19ad9c9 100644 --- a/apps/sim/app/api/chat/utils.test.ts +++ b/apps/sim/app/api/chat/utils.test.ts @@ -1,4 +1,4 @@ -import { 
databaseMock, loggerMock } from '@sim/testing' +import { databaseMock, loggerMock, requestUtilsMock } from '@sim/testing' import type { NextResponse } from 'next/server' /** * Tests for chat API utils @@ -37,9 +37,7 @@ vi.mock('@/lib/core/security/encryption', () => ({ decryptSecret: mockDecryptSecret, })) -vi.mock('@/lib/core/utils/request', () => ({ - generateRequestId: vi.fn(), -})) +vi.mock('@/lib/core/utils/request', () => requestUtilsMock) vi.mock('@/lib/core/config/feature-flags', () => ({ isDev: true, diff --git a/apps/sim/app/api/files/delete/route.test.ts b/apps/sim/app/api/files/delete/route.test.ts index 0cc9824f71..26fa2d9f09 100644 --- a/apps/sim/app/api/files/delete/route.test.ts +++ b/apps/sim/app/api/files/delete/route.test.ts @@ -2,6 +2,7 @@ import { createMockRequest, mockAuth, mockCryptoUuid, + mockHybridAuth, mockUuid, setupCommonApiMocks, } from '@sim/testing' @@ -28,13 +29,12 @@ function setupFileApiMocks( authMocks.setUnauthenticated() } - vi.doMock('@/lib/auth/hybrid', () => ({ - checkSessionOrInternalAuth: vi.fn().mockResolvedValue({ - success: authenticated, - userId: authenticated ? 'test-user-id' : undefined, - error: authenticated ? undefined : 'Unauthorized', - }), - })) + const { mockCheckSessionOrInternalAuth } = mockHybridAuth() + mockCheckSessionOrInternalAuth.mockResolvedValue({ + success: authenticated, + userId: authenticated ? 'test-user-id' : undefined, + error: authenticated ? undefined : 'Unauthorized', + }) vi.doMock('@/app/api/files/authorization', () => ({ verifyFileAccess: vi.fn().mockResolvedValue(true), diff --git a/apps/sim/app/api/files/parse/route.test.ts b/apps/sim/app/api/files/parse/route.test.ts index bfdc3bbe71..eb69942d38 100644 --- a/apps/sim/app/api/files/parse/route.test.ts +++ b/apps/sim/app/api/files/parse/route.test.ts @@ -8,6 +8,7 @@ import { createMockRequest, mockAuth, mockCryptoUuid, + mockHybridAuth, mockUuid, setupCommonApiMocks, } from '@sim/testing' @@ -34,13 +35,12 @@ function setupFileApiMocks( authMocks.setUnauthenticated() } - vi.doMock('@/lib/auth/hybrid', () => ({ - checkInternalAuth: vi.fn().mockResolvedValue({ - success: authenticated, - userId: authenticated ? 'test-user-id' : undefined, - error: authenticated ? undefined : 'Unauthorized', - }), - })) + const { mockCheckInternalAuth } = mockHybridAuth() + mockCheckInternalAuth.mockResolvedValue({ + success: authenticated, + userId: authenticated ? 'test-user-id' : undefined, + error: authenticated ? undefined : 'Unauthorized', + }) vi.doMock('@/app/api/files/authorization', () => ({ verifyFileAccess: vi.fn().mockResolvedValue(true), diff --git a/apps/sim/app/api/files/presigned/route.test.ts b/apps/sim/app/api/files/presigned/route.test.ts index 0721269382..4089343a9c 100644 --- a/apps/sim/app/api/files/presigned/route.test.ts +++ b/apps/sim/app/api/files/presigned/route.test.ts @@ -1,4 +1,10 @@ -import { mockAuth, mockCryptoUuid, mockUuid, setupCommonApiMocks } from '@sim/testing' +import { + mockAuth, + mockCryptoUuid, + mockHybridAuth, + mockUuid, + setupCommonApiMocks, +} from '@sim/testing' import { NextRequest } from 'next/server' import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' @@ -28,13 +34,12 @@ function setupFileApiMocks( authMocks.setUnauthenticated() } - vi.doMock('@/lib/auth/hybrid', () => ({ - checkHybridAuth: vi.fn().mockResolvedValue({ - success: authenticated, - userId: authenticated ? 'test-user-id' : undefined, - error: authenticated ? 
undefined : 'Unauthorized', - }), - })) + const { mockCheckHybridAuth } = mockHybridAuth() + mockCheckHybridAuth.mockResolvedValue({ + success: authenticated, + userId: authenticated ? 'test-user-id' : undefined, + error: authenticated ? undefined : 'Unauthorized', + }) vi.doMock('@/app/api/files/authorization', () => ({ verifyFileAccess: vi.fn().mockResolvedValue(true), diff --git a/apps/sim/app/api/files/serve/[...path]/route.test.ts b/apps/sim/app/api/files/serve/[...path]/route.test.ts index d09adf048a..d2b3b58a35 100644 --- a/apps/sim/app/api/files/serve/[...path]/route.test.ts +++ b/apps/sim/app/api/files/serve/[...path]/route.test.ts @@ -7,6 +7,7 @@ import { defaultMockUser, mockAuth, mockCryptoUuid, + mockHybridAuth, mockUuid, setupCommonApiMocks, } from '@sim/testing' @@ -54,12 +55,11 @@ describe('File Serve API Route', () => { withUploadUtils: true, }) - vi.doMock('@/lib/auth/hybrid', () => ({ - checkSessionOrInternalAuth: vi.fn().mockResolvedValue({ - success: true, - userId: 'test-user-id', - }), - })) + const { mockCheckSessionOrInternalAuth: serveAuthMock } = mockHybridAuth() + serveAuthMock.mockResolvedValue({ + success: true, + userId: 'test-user-id', + }) vi.doMock('@/app/api/files/authorization', () => ({ verifyFileAccess: vi.fn().mockResolvedValue(true), @@ -164,12 +164,11 @@ describe('File Serve API Route', () => { findLocalFile: vi.fn().mockReturnValue('/test/uploads/nested/path/file.txt'), })) - vi.doMock('@/lib/auth/hybrid', () => ({ - checkSessionOrInternalAuth: vi.fn().mockResolvedValue({ - success: true, - userId: 'test-user-id', - }), - })) + const { mockCheckSessionOrInternalAuth: serveAuthMock } = mockHybridAuth() + serveAuthMock.mockResolvedValue({ + success: true, + userId: 'test-user-id', + }) vi.doMock('@/app/api/files/authorization', () => ({ verifyFileAccess: vi.fn().mockResolvedValue(true), @@ -225,12 +224,11 @@ describe('File Serve API Route', () => { USE_BLOB_STORAGE: false, })) - vi.doMock('@/lib/auth/hybrid', () => ({ - checkSessionOrInternalAuth: vi.fn().mockResolvedValue({ - success: true, - userId: 'test-user-id', - }), - })) + const { mockCheckSessionOrInternalAuth: serveAuthMock } = mockHybridAuth() + serveAuthMock.mockResolvedValue({ + success: true, + userId: 'test-user-id', + }) vi.doMock('@/app/api/files/authorization', () => ({ verifyFileAccess: vi.fn().mockResolvedValue(true), @@ -290,12 +288,11 @@ describe('File Serve API Route', () => { readFile: vi.fn().mockRejectedValue(new Error('ENOENT: no such file or directory')), })) - vi.doMock('@/lib/auth/hybrid', () => ({ - checkSessionOrInternalAuth: vi.fn().mockResolvedValue({ - success: true, - userId: 'test-user-id', - }), - })) + const { mockCheckSessionOrInternalAuth: serveAuthMock } = mockHybridAuth() + serveAuthMock.mockResolvedValue({ + success: true, + userId: 'test-user-id', + }) vi.doMock('@/app/api/files/authorization', () => ({ verifyFileAccess: vi.fn().mockResolvedValue(false), // File not found = no access @@ -349,12 +346,11 @@ describe('File Serve API Route', () => { for (const test of contentTypeTests) { it(`should serve ${test.ext} file with correct content type`, async () => { - vi.doMock('@/lib/auth/hybrid', () => ({ - checkSessionOrInternalAuth: vi.fn().mockResolvedValue({ - success: true, - userId: 'test-user-id', - }), - })) + const { mockCheckSessionOrInternalAuth: ctAuthMock } = mockHybridAuth() + ctAuthMock.mockResolvedValue({ + success: true, + userId: 'test-user-id', + }) vi.doMock('@/app/api/files/authorization', () => ({ verifyFileAccess: 
vi.fn().mockResolvedValue(true), diff --git a/apps/sim/app/api/files/upload/route.test.ts b/apps/sim/app/api/files/upload/route.test.ts index 25c8f68adf..be6f4f9bf5 100644 --- a/apps/sim/app/api/files/upload/route.test.ts +++ b/apps/sim/app/api/files/upload/route.test.ts @@ -3,7 +3,13 @@ * * @vitest-environment node */ -import { mockAuth, mockCryptoUuid, mockUuid, setupCommonApiMocks } from '@sim/testing' +import { + mockAuth, + mockCryptoUuid, + mockHybridAuth, + mockUuid, + setupCommonApiMocks, +} from '@sim/testing' import { NextRequest } from 'next/server' import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' @@ -27,13 +33,12 @@ function setupFileApiMocks( authMocks.setUnauthenticated() } - vi.doMock('@/lib/auth/hybrid', () => ({ - checkHybridAuth: vi.fn().mockResolvedValue({ - success: authenticated, - userId: authenticated ? 'test-user-id' : undefined, - error: authenticated ? undefined : 'Unauthorized', - }), - })) + const { mockCheckHybridAuth } = mockHybridAuth() + mockCheckHybridAuth.mockResolvedValue({ + success: authenticated, + userId: authenticated ? 'test-user-id' : undefined, + error: authenticated ? undefined : 'Unauthorized', + }) vi.doMock('@/app/api/files/authorization', () => ({ verifyFileAccess: vi.fn().mockResolvedValue(true), diff --git a/apps/sim/app/api/knowledge/search/route.test.ts b/apps/sim/app/api/knowledge/search/route.test.ts index b08ab969b4..bf7ae1f72e 100644 --- a/apps/sim/app/api/knowledge/search/route.test.ts +++ b/apps/sim/app/api/knowledge/search/route.test.ts @@ -10,6 +10,7 @@ import { createMockRequest, mockConsoleLogger, mockKnowledgeSchemas, + requestUtilsMock, } from '@sim/testing' import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' @@ -29,9 +30,7 @@ mockKnowledgeSchemas() vi.mock('@/lib/core/config/env', () => createEnvMock({ OPENAI_API_KEY: 'test-api-key' })) -vi.mock('@/lib/core/utils/request', () => ({ - generateRequestId: vi.fn(() => 'test-request-id'), -})) +vi.mock('@/lib/core/utils/request', () => requestUtilsMock) vi.mock('@/lib/documents/utils', () => ({ retryWithExponentialBackoff: vi.fn().mockImplementation((fn) => fn()), diff --git a/apps/sim/app/api/knowledge/utils.ts b/apps/sim/app/api/knowledge/utils.ts index e7abe35b38..7a2f82d071 100644 --- a/apps/sim/app/api/knowledge/utils.ts +++ b/apps/sim/app/api/knowledge/utils.ts @@ -163,17 +163,18 @@ export async function checkKnowledgeBaseAccess( const kbData = kb[0] - // Case 1: User owns the knowledge base directly - if (kbData.userId === userId) { - return { hasAccess: true, knowledgeBase: kbData } - } - - // Case 2: Knowledge base belongs to a workspace the user has permissions for if (kbData.workspaceId) { + // Workspace KB: use workspace permissions only const userPermission = await getUserEntityPermissions(userId, 'workspace', kbData.workspaceId) if (userPermission !== null) { return { hasAccess: true, knowledgeBase: kbData } } + return { hasAccess: false } + } + + // Legacy non-workspace KB: allow owner access + if (kbData.userId === userId) { + return { hasAccess: true, knowledgeBase: kbData } } return { hasAccess: false } @@ -182,8 +183,8 @@ export async function checkKnowledgeBaseAccess( /** * Check if a user has write access to a knowledge base * Write access is granted if: - * 1. User owns the knowledge base directly, OR - * 2. User has write or admin permissions on the knowledge base's workspace + * 1. KB has a workspace: user has write or admin permissions on that workspace + * 2. 
KB has no workspace (legacy): user owns the KB directly */ export async function checkKnowledgeBaseWriteAccess( knowledgeBaseId: string, @@ -206,17 +207,18 @@ export async function checkKnowledgeBaseWriteAccess( const kbData = kb[0] - // Case 1: User owns the knowledge base directly - if (kbData.userId === userId) { - return { hasAccess: true, knowledgeBase: kbData } - } - - // Case 2: Knowledge base belongs to a workspace and user has write/admin permissions if (kbData.workspaceId) { + // Workspace KB: use workspace permissions only const userPermission = await getUserEntityPermissions(userId, 'workspace', kbData.workspaceId) if (userPermission === 'write' || userPermission === 'admin') { return { hasAccess: true, knowledgeBase: kbData } } + return { hasAccess: false } + } + + // Legacy non-workspace KB: allow owner access + if (kbData.userId === userId) { + return { hasAccess: true, knowledgeBase: kbData } } return { hasAccess: false } diff --git a/apps/sim/app/api/mcp/serve/[serverId]/route.test.ts b/apps/sim/app/api/mcp/serve/[serverId]/route.test.ts index 95a3f89eda..fc6b5182ed 100644 --- a/apps/sim/app/api/mcp/serve/[serverId]/route.test.ts +++ b/apps/sim/app/api/mcp/serve/[serverId]/route.test.ts @@ -3,10 +3,11 @@ * * @vitest-environment node */ +import { mockHybridAuth } from '@sim/testing' import { NextRequest } from 'next/server' import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' -const mockCheckHybridAuth = vi.fn() +let mockCheckHybridAuth: ReturnType const mockGetUserEntityPermissions = vi.fn() const mockGenerateInternalToken = vi.fn() const mockDbSelect = vi.fn() @@ -61,9 +62,7 @@ describe('MCP Serve Route', () => { isDeployed: 'isDeployed', }, })) - vi.doMock('@/lib/auth/hybrid', () => ({ - checkHybridAuth: mockCheckHybridAuth, - })) + ;({ mockCheckHybridAuth } = mockHybridAuth()) vi.doMock('@/lib/workspaces/permissions/utils', () => ({ getUserEntityPermissions: mockGetUserEntityPermissions, })) diff --git a/apps/sim/app/api/permission-groups/[id]/route.ts b/apps/sim/app/api/permission-groups/[id]/route.ts index 14b8714447..9391b67d82 100644 --- a/apps/sim/app/api/permission-groups/[id]/route.ts +++ b/apps/sim/app/api/permission-groups/[id]/route.ts @@ -19,6 +19,7 @@ const configSchema = z.object({ allowedModelProviders: z.array(z.string()).nullable().optional(), hideTraceSpans: z.boolean().optional(), hideKnowledgeBaseTab: z.boolean().optional(), + hideTablesTab: z.boolean().optional(), hideCopilot: z.boolean().optional(), hideApiKeysTab: z.boolean().optional(), hideEnvironmentTab: z.boolean().optional(), diff --git a/apps/sim/app/api/permission-groups/route.ts b/apps/sim/app/api/permission-groups/route.ts index 3fec50ae15..a1b6c25476 100644 --- a/apps/sim/app/api/permission-groups/route.ts +++ b/apps/sim/app/api/permission-groups/route.ts @@ -20,6 +20,7 @@ const configSchema = z.object({ allowedModelProviders: z.array(z.string()).nullable().optional(), hideTraceSpans: z.boolean().optional(), hideKnowledgeBaseTab: z.boolean().optional(), + hideTablesTab: z.boolean().optional(), hideCopilot: z.boolean().optional(), hideApiKeysTab: z.boolean().optional(), hideEnvironmentTab: z.boolean().optional(), diff --git a/apps/sim/app/api/schedules/[id]/route.test.ts b/apps/sim/app/api/schedules/[id]/route.test.ts index e2377ddc3e..ca0e723be5 100644 --- a/apps/sim/app/api/schedules/[id]/route.test.ts +++ b/apps/sim/app/api/schedules/[id]/route.test.ts @@ -3,7 +3,7 @@ * * @vitest-environment node */ -import { auditMock, databaseMock, loggerMock } from '@sim/testing' 
+import { auditMock, databaseMock, loggerMock, requestUtilsMock } from '@sim/testing' import { NextRequest } from 'next/server' import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' @@ -31,9 +31,7 @@ vi.mock('drizzle-orm', () => ({ eq: vi.fn(), })) -vi.mock('@/lib/core/utils/request', () => ({ - generateRequestId: () => 'test-request-id', -})) +vi.mock('@/lib/core/utils/request', () => requestUtilsMock) vi.mock('@sim/logger', () => loggerMock) diff --git a/apps/sim/app/api/schedules/route.test.ts b/apps/sim/app/api/schedules/route.test.ts index 9d1530d501..434bc2fa8d 100644 --- a/apps/sim/app/api/schedules/route.test.ts +++ b/apps/sim/app/api/schedules/route.test.ts @@ -3,7 +3,7 @@ * * @vitest-environment node */ -import { databaseMock, loggerMock } from '@sim/testing' +import { databaseMock, loggerMock, requestUtilsMock } from '@sim/testing' import { NextRequest } from 'next/server' import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' @@ -43,9 +43,7 @@ vi.mock('drizzle-orm', () => ({ isNull: vi.fn(), })) -vi.mock('@/lib/core/utils/request', () => ({ - generateRequestId: () => 'test-request-id', -})) +vi.mock('@/lib/core/utils/request', () => requestUtilsMock) vi.mock('@sim/logger', () => loggerMock) diff --git a/apps/sim/app/api/table/[tableId]/route.ts b/apps/sim/app/api/table/[tableId]/route.ts new file mode 100644 index 0000000000..8f8e7f0df9 --- /dev/null +++ b/apps/sim/app/api/table/[tableId]/route.ts @@ -0,0 +1,138 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { generateRequestId } from '@/lib/core/utils/request' +import { deleteTable, type TableSchema } from '@/lib/table' +import { accessError, checkAccess, normalizeColumn, verifyTableWorkspace } from '../utils' + +const logger = createLogger('TableDetailAPI') + +const GetTableSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), +}) + +interface TableRouteParams { + params: Promise<{ tableId: string }> +} + +/** GET /api/table/[tableId] - Retrieves a single table's details. */ +export async function GET(request: NextRequest, { params }: TableRouteParams) { + const requestId = generateRequestId() + const { tableId } = await params + + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + logger.warn(`[${requestId}] Unauthorized table access attempt`) + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + const { searchParams } = new URL(request.url) + const validated = GetTableSchema.parse({ + workspaceId: searchParams.get('workspaceId'), + }) + + const result = await checkAccess(tableId, authResult.userId, 'read') + if (!result.ok) return accessError(result, requestId, tableId) + + const { table } = result + + const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId) + if (!isValidWorkspace) { + logger.warn( + `[${requestId}] Workspace ID mismatch for table ${tableId}. 
Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}` + ) + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + logger.info(`[${requestId}] Retrieved table ${tableId} for user ${authResult.userId}`) + + const schemaData = table.schema as TableSchema + + return NextResponse.json({ + success: true, + data: { + table: { + id: table.id, + name: table.name, + description: table.description, + schema: { + columns: schemaData.columns.map(normalizeColumn), + }, + rowCount: table.rowCount, + maxRows: table.maxRows, + createdAt: + table.createdAt instanceof Date + ? table.createdAt.toISOString() + : String(table.createdAt), + updatedAt: + table.updatedAt instanceof Date + ? table.updatedAt.toISOString() + : String(table.updatedAt), + }, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error getting table:`, error) + return NextResponse.json({ error: 'Failed to get table' }, { status: 500 }) + } +} + +/** DELETE /api/table/[tableId] - Deletes a table and all its rows. */ +export async function DELETE(request: NextRequest, { params }: TableRouteParams) { + const requestId = generateRequestId() + const { tableId } = await params + + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + logger.warn(`[${requestId}] Unauthorized table delete attempt`) + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + const { searchParams } = new URL(request.url) + const validated = GetTableSchema.parse({ + workspaceId: searchParams.get('workspaceId'), + }) + + const result = await checkAccess(tableId, authResult.userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + + const { table } = result + + const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId) + if (!isValidWorkspace) { + logger.warn( + `[${requestId}] Workspace ID mismatch for table ${tableId}. 
Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}` + ) + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + await deleteTable(tableId, requestId) + + return NextResponse.json({ + success: true, + data: { + message: 'Table deleted successfully', + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error deleting table:`, error) + return NextResponse.json({ error: 'Failed to delete table' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/table/[tableId]/rows/[rowId]/route.ts b/apps/sim/app/api/table/[tableId]/rows/[rowId]/route.ts new file mode 100644 index 0000000000..15a4473283 --- /dev/null +++ b/apps/sim/app/api/table/[tableId]/rows/[rowId]/route.ts @@ -0,0 +1,276 @@ +import { db } from '@sim/db' +import { userTableRows } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, eq } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { generateRequestId } from '@/lib/core/utils/request' +import type { RowData, TableSchema } from '@/lib/table' +import { validateRowData } from '@/lib/table' +import { accessError, checkAccess, verifyTableWorkspace } from '../../../utils' + +const logger = createLogger('TableRowAPI') + +const GetRowSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), +}) + +const UpdateRowSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + data: z.record(z.unknown(), { required_error: 'Row data is required' }), +}) + +const DeleteRowSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), +}) + +interface RowRouteParams { + params: Promise<{ tableId: string; rowId: string }> +} + +/** GET /api/table/[tableId]/rows/[rowId] - Retrieves a single row. */ +export async function GET(request: NextRequest, { params }: RowRouteParams) { + const requestId = generateRequestId() + const { tableId, rowId } = await params + + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + const { searchParams } = new URL(request.url) + const validated = GetRowSchema.parse({ + workspaceId: searchParams.get('workspaceId'), + }) + + const result = await checkAccess(tableId, authResult.userId, 'read') + if (!result.ok) return accessError(result, requestId, tableId) + + const { table } = result + + const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId) + if (!isValidWorkspace) { + logger.warn( + `[${requestId}] Workspace ID mismatch for table ${tableId}. 
Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}` + ) + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const [row] = await db + .select({ + id: userTableRows.id, + data: userTableRows.data, + createdAt: userTableRows.createdAt, + updatedAt: userTableRows.updatedAt, + }) + .from(userTableRows) + .where( + and( + eq(userTableRows.id, rowId), + eq(userTableRows.tableId, tableId), + eq(userTableRows.workspaceId, validated.workspaceId) + ) + ) + .limit(1) + + if (!row) { + return NextResponse.json({ error: 'Row not found' }, { status: 404 }) + } + + logger.info(`[${requestId}] Retrieved row ${rowId} from table ${tableId}`) + + return NextResponse.json({ + success: true, + data: { + row: { + id: row.id, + data: row.data, + createdAt: row.createdAt.toISOString(), + updatedAt: row.updatedAt.toISOString(), + }, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error getting row:`, error) + return NextResponse.json({ error: 'Failed to get row' }, { status: 500 }) + } +} + +/** PATCH /api/table/[tableId]/rows/[rowId] - Updates a single row (supports partial updates). */ +export async function PATCH(request: NextRequest, { params }: RowRouteParams) { + const requestId = generateRequestId() + const { tableId, rowId } = await params + + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + const body: unknown = await request.json() + const validated = UpdateRowSchema.parse(body) + + const result = await checkAccess(tableId, authResult.userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + + const { table } = result + + const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId) + if (!isValidWorkspace) { + logger.warn( + `[${requestId}] Workspace ID mismatch for table ${tableId}. 
Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}` + ) + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + // Fetch existing row to support partial updates + const [existingRow] = await db + .select({ data: userTableRows.data }) + .from(userTableRows) + .where( + and( + eq(userTableRows.id, rowId), + eq(userTableRows.tableId, tableId), + eq(userTableRows.workspaceId, validated.workspaceId) + ) + ) + .limit(1) + + if (!existingRow) { + return NextResponse.json({ error: 'Row not found' }, { status: 404 }) + } + + // Merge existing data with incoming partial data (incoming takes precedence) + const mergedData = { + ...(existingRow.data as RowData), + ...(validated.data as RowData), + } + + const validation = await validateRowData({ + rowData: mergedData, + schema: table.schema as TableSchema, + tableId, + excludeRowId: rowId, + }) + if (!validation.valid) return validation.response + + const now = new Date() + + const [updatedRow] = await db + .update(userTableRows) + .set({ + data: mergedData, + updatedAt: now, + }) + .where( + and( + eq(userTableRows.id, rowId), + eq(userTableRows.tableId, tableId), + eq(userTableRows.workspaceId, validated.workspaceId) + ) + ) + .returning() + + if (!updatedRow) { + return NextResponse.json({ error: 'Row not found' }, { status: 404 }) + } + + logger.info(`[${requestId}] Updated row ${rowId} in table ${tableId}`) + + return NextResponse.json({ + success: true, + data: { + row: { + id: updatedRow.id, + data: updatedRow.data, + createdAt: updatedRow.createdAt.toISOString(), + updatedAt: updatedRow.updatedAt.toISOString(), + }, + message: 'Row updated successfully', + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error updating row:`, error) + return NextResponse.json({ error: 'Failed to update row' }, { status: 500 }) + } +} + +/** DELETE /api/table/[tableId]/rows/[rowId] - Deletes a single row. */ +export async function DELETE(request: NextRequest, { params }: RowRouteParams) { + const requestId = generateRequestId() + const { tableId, rowId } = await params + + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + const body: unknown = await request.json() + const validated = DeleteRowSchema.parse(body) + + const result = await checkAccess(tableId, authResult.userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + + const { table } = result + + const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId) + if (!isValidWorkspace) { + logger.warn( + `[${requestId}] Workspace ID mismatch for table ${tableId}. 
Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}` + ) + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const [deletedRow] = await db + .delete(userTableRows) + .where( + and( + eq(userTableRows.id, rowId), + eq(userTableRows.tableId, tableId), + eq(userTableRows.workspaceId, validated.workspaceId) + ) + ) + .returning() + + if (!deletedRow) { + return NextResponse.json({ error: 'Row not found' }, { status: 404 }) + } + + logger.info(`[${requestId}] Deleted row ${rowId} from table ${tableId}`) + + return NextResponse.json({ + success: true, + data: { + message: 'Row deleted successfully', + deletedCount: 1, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error deleting row:`, error) + return NextResponse.json({ error: 'Failed to delete row' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/table/[tableId]/rows/route.ts b/apps/sim/app/api/table/[tableId]/rows/route.ts new file mode 100644 index 0000000000..47bd0fe1a5 --- /dev/null +++ b/apps/sim/app/api/table/[tableId]/rows/route.ts @@ -0,0 +1,725 @@ +import { db } from '@sim/db' +import { userTableRows } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, eq, sql } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { generateRequestId } from '@/lib/core/utils/request' +import type { Filter, RowData, Sort, TableSchema } from '@/lib/table' +import { + checkUniqueConstraintsDb, + getUniqueColumns, + TABLE_LIMITS, + USER_TABLE_ROWS_SQL_NAME, + validateBatchRows, + validateRowAgainstSchema, + validateRowData, + validateRowSize, +} from '@/lib/table' +import { buildFilterClause, buildSortClause } from '@/lib/table/sql' +import { accessError, checkAccess } from '../../utils' + +const logger = createLogger('TableRowsAPI') + +const InsertRowSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + data: z.record(z.unknown(), { required_error: 'Row data is required' }), +}) + +const BatchInsertRowsSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + rows: z + .array(z.record(z.unknown()), { required_error: 'Rows array is required' }) + .min(1, 'At least one row is required') + .max(1000, 'Cannot insert more than 1000 rows per batch'), +}) + +const QueryRowsSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + filter: z.record(z.unknown()).optional(), + sort: z.record(z.enum(['asc', 'desc'])).optional(), + limit: z.coerce + .number({ required_error: 'Limit must be a number' }) + .int('Limit must be an integer') + .min(1, 'Limit must be at least 1') + .max(TABLE_LIMITS.MAX_QUERY_LIMIT, `Limit cannot exceed ${TABLE_LIMITS.MAX_QUERY_LIMIT}`) + .optional() + .default(100), + offset: z.coerce + .number({ required_error: 'Offset must be a number' }) + .int('Offset must be an integer') + .min(0, 'Offset must be 0 or greater') + .optional() + .default(0), +}) + +const UpdateRowsByFilterSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + filter: z.record(z.unknown(), { required_error: 'Filter criteria is required' }), + data: z.record(z.unknown(), { required_error: 'Update data is required' }), + limit: z.coerce + .number({ required_error: 'Limit must be a number' }) + 
.int('Limit must be an integer')
+    .min(1, 'Limit must be at least 1')
+    .max(1000, 'Cannot update more than 1000 rows per operation')
+    .optional(),
+})
+
+const DeleteRowsByFilterSchema = z.object({
+  workspaceId: z.string().min(1, 'Workspace ID is required'),
+  filter: z.record(z.unknown(), { required_error: 'Filter criteria is required' }),
+  limit: z.coerce
+    .number({ required_error: 'Limit must be a number' })
+    .int('Limit must be an integer')
+    .min(1, 'Limit must be at least 1')
+    .max(1000, 'Cannot delete more than 1000 rows per operation')
+    .optional(),
+})
+
+const DeleteRowsByIdsSchema = z.object({
+  workspaceId: z.string().min(1, 'Workspace ID is required'),
+  rowIds: z
+    .array(z.string().min(1), { required_error: 'Row IDs are required' })
+    .min(1, 'At least one row ID is required')
+    .max(1000, 'Cannot delete more than 1000 rows per operation'),
+})
+
+const DeleteRowsRequestSchema = z.union([DeleteRowsByFilterSchema, DeleteRowsByIdsSchema])
+
+interface TableRowsRouteParams {
+  params: Promise<{ tableId: string }>
+}
+
+async function handleBatchInsert(
+  requestId: string,
+  tableId: string,
+  body: z.infer<typeof BatchInsertRowsSchema>,
+  userId: string
+): Promise<NextResponse> {
+  const validated = BatchInsertRowsSchema.parse(body)
+
+  const accessResult = await checkAccess(tableId, userId, 'write')
+  if (!accessResult.ok) return accessError(accessResult, requestId, tableId)
+
+  const { table } = accessResult
+
+  if (validated.workspaceId !== table.workspaceId) {
+    logger.warn(
+      `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
+    )
+    return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
+  }
+
+  const workspaceId = validated.workspaceId
+
+  const remainingCapacity = table.maxRows - table.rowCount
+  if (remainingCapacity < validated.rows.length) {
+    return NextResponse.json(
+      {
+        error: `Insufficient capacity. Can only insert ${remainingCapacity} more rows (table has ${table.rowCount}/${table.maxRows} rows)`,
+      },
+      { status: 400 }
+    )
+  }
+
+  const validation = await validateBatchRows({
+    rows: validated.rows as RowData[],
+    schema: table.schema as TableSchema,
+    tableId,
+  })
+  if (!validation.valid) return validation.response
+
+  const now = new Date()
+  const rowsToInsert = validated.rows.map((data) => ({
+    id: `row_${crypto.randomUUID().replace(/-/g, '')}`,
+    tableId,
+    workspaceId,
+    data,
+    createdAt: now,
+    updatedAt: now,
+    createdBy: userId,
+  }))
+
+  const insertedRows = await db.insert(userTableRows).values(rowsToInsert).returning()
+
+  logger.info(`[${requestId}] Batch inserted ${insertedRows.length} rows into table ${tableId}`)
+
+  return NextResponse.json({
+    success: true,
+    data: {
+      rows: insertedRows.map((r) => ({
+        id: r.id,
+        data: r.data,
+        createdAt: r.createdAt.toISOString(),
+        updatedAt: r.updatedAt.toISOString(),
+      })),
+      insertedCount: insertedRows.length,
+      message: `Successfully inserted ${insertedRows.length} rows`,
+    },
+  })
+}
+
+/** POST /api/table/[tableId]/rows - Inserts row(s). Supports single or batch insert.
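The `z.coerce` pattern used throughout these schemas is what lets one validator serve both JSON bodies and query-string values. A standalone sketch of the behavior, detached from the route:

```typescript
import { z } from 'zod'

// Mirrors the limit validators above, for illustration only.
const Limit = z.coerce.number().int().min(1).max(1000).optional().default(100)

Limit.parse('25') // 25: the query-string value is coerced before validation
Limit.parse(undefined) // 100: the default applies when the parameter is absent
// Limit.parse('abc') throws a ZodError: Number('abc') is NaN, which fails the number check
```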
*/ +export async function POST(request: NextRequest, { params }: TableRowsRouteParams) { + const requestId = generateRequestId() + const { tableId } = await params + + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + const body: unknown = await request.json() + + if ( + typeof body === 'object' && + body !== null && + 'rows' in body && + Array.isArray((body as Record).rows) + ) { + return handleBatchInsert( + requestId, + tableId, + body as z.infer, + authResult.userId + ) + } + + const validated = InsertRowSchema.parse(body) + + const accessResult = await checkAccess(tableId, authResult.userId, 'write') + if (!accessResult.ok) return accessError(accessResult, requestId, tableId) + + const { table } = accessResult + + if (validated.workspaceId !== table.workspaceId) { + logger.warn( + `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}` + ) + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const workspaceId = validated.workspaceId + const rowData = validated.data as RowData + + const validation = await validateRowData({ + rowData, + schema: table.schema as TableSchema, + tableId, + }) + if (!validation.valid) return validation.response + + if (table.rowCount >= table.maxRows) { + return NextResponse.json( + { error: `Table row limit reached (${table.maxRows} rows max)` }, + { status: 400 } + ) + } + + const rowId = `row_${crypto.randomUUID().replace(/-/g, '')}` + const now = new Date() + + const [row] = await db + .insert(userTableRows) + .values({ + id: rowId, + tableId, + workspaceId, + data: validated.data, + createdAt: now, + updatedAt: now, + createdBy: authResult.userId, + }) + .returning() + + logger.info(`[${requestId}] Inserted row ${rowId} into table ${tableId}`) + + return NextResponse.json({ + success: true, + data: { + row: { + id: row.id, + data: row.data, + createdAt: row.createdAt.toISOString(), + updatedAt: row.updatedAt.toISOString(), + }, + message: 'Row inserted successfully', + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error inserting row:`, error) + return NextResponse.json({ error: 'Failed to insert row' }, { status: 500 }) + } +} + +/** GET /api/table/[tableId]/rows - Queries rows with filtering, sorting, and pagination. 
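The POST route dispatches on request shape: a body carrying a `rows` array takes the batch path, anything else is validated as a single insert. Hypothetical payloads for both forms:

```typescript
// Single vs. batch insert against the same endpoint; ids are placeholders.
const tableId = 'tbl_example'
const workspaceId = 'ws_example'
const single = { workspaceId, data: { email: 'ada@example.com' } }
const batch = {
  workspaceId,
  rows: [{ email: 'ada@example.com' }, { email: 'grace@example.com' }],
}

await fetch(`/api/table/${tableId}/rows`, {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify(batch), // or `single`; the handler branches on `rows`
})
```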
*/ +export async function GET(request: NextRequest, { params }: TableRowsRouteParams) { + const requestId = generateRequestId() + const { tableId } = await params + + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + const { searchParams } = new URL(request.url) + const workspaceId = searchParams.get('workspaceId') + const filterParam = searchParams.get('filter') + const sortParam = searchParams.get('sort') + const limit = searchParams.get('limit') + const offset = searchParams.get('offset') + + let filter: Record | undefined + let sort: Sort | undefined + + try { + if (filterParam) { + filter = JSON.parse(filterParam) as Record + } + if (sortParam) { + sort = JSON.parse(sortParam) as Sort + } + } catch { + return NextResponse.json({ error: 'Invalid filter or sort JSON' }, { status: 400 }) + } + + const validated = QueryRowsSchema.parse({ + workspaceId, + filter, + sort, + limit, + offset, + }) + + const accessResult = await checkAccess(tableId, authResult.userId, 'read') + if (!accessResult.ok) return accessError(accessResult, requestId, tableId) + + const { table } = accessResult + + if (validated.workspaceId !== table.workspaceId) { + logger.warn( + `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}` + ) + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const baseConditions = [ + eq(userTableRows.tableId, tableId), + eq(userTableRows.workspaceId, validated.workspaceId), + ] + + if (validated.filter) { + const filterClause = buildFilterClause(validated.filter as Filter, USER_TABLE_ROWS_SQL_NAME) + if (filterClause) { + baseConditions.push(filterClause) + } + } + + let query = db + .select({ + id: userTableRows.id, + data: userTableRows.data, + createdAt: userTableRows.createdAt, + updatedAt: userTableRows.updatedAt, + }) + .from(userTableRows) + .where(and(...baseConditions)) + + if (validated.sort) { + const schema = table.schema as TableSchema + const sortClause = buildSortClause(validated.sort, USER_TABLE_ROWS_SQL_NAME, schema.columns) + if (sortClause) { + query = query.orderBy(sortClause) as typeof query + } + } else { + query = query.orderBy(userTableRows.createdAt) as typeof query + } + + const countQuery = db + .select({ count: sql`count(*)` }) + .from(userTableRows) + .where(and(...baseConditions)) + + const [{ count: totalCount }] = await countQuery + + const rows = await query.limit(validated.limit).offset(validated.offset) + + logger.info( + `[${requestId}] Queried ${rows.length} rows from table ${tableId} (total: ${totalCount})` + ) + + return NextResponse.json({ + success: true, + data: { + rows: rows.map((r) => ({ + id: r.id, + data: r.data, + createdAt: r.createdAt.toISOString(), + updatedAt: r.updatedAt.toISOString(), + })), + rowCount: rows.length, + totalCount: Number(totalCount), + limit: validated.limit, + offset: validated.offset, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error querying rows:`, error) + return NextResponse.json({ error: 'Failed to query rows' }, { status: 500 }) + } +} + +/** PUT /api/table/[tableId]/rows - Updates rows matching filter criteria. 
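Because `filter` and `sort` travel as JSON inside query parameters, callers stringify them before encoding. The exact filter grammar lives in `buildFilterClause`; the simple equality form below is an assumption for illustration.

```typescript
// Hypothetical query against GET /api/table/[tableId]/rows.
const qs = new URLSearchParams({
  workspaceId: 'ws_example',
  filter: JSON.stringify({ status: 'active' }), // shape assumed; see buildFilterClause
  sort: JSON.stringify({ createdAt: 'desc' }),
  limit: '50',
  offset: '0',
})
const res = await fetch(`/api/table/tbl_example/rows?${qs}`)
// res.json() -> { data: { rows, rowCount, totalCount, limit, offset } }
```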
*/ +export async function PUT(request: NextRequest, { params }: TableRowsRouteParams) { + const requestId = generateRequestId() + const { tableId } = await params + + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + const body: unknown = await request.json() + const validated = UpdateRowsByFilterSchema.parse(body) + + const accessResult = await checkAccess(tableId, authResult.userId, 'write') + if (!accessResult.ok) return accessError(accessResult, requestId, tableId) + + const { table } = accessResult + + if (validated.workspaceId !== table.workspaceId) { + logger.warn( + `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}` + ) + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const updateData = validated.data as RowData + + const sizeValidation = validateRowSize(updateData) + if (!sizeValidation.valid) { + return NextResponse.json( + { error: 'Invalid row data', details: sizeValidation.errors }, + { status: 400 } + ) + } + + const baseConditions = [ + eq(userTableRows.tableId, tableId), + eq(userTableRows.workspaceId, validated.workspaceId), + ] + + const filterClause = buildFilterClause(validated.filter as Filter, USER_TABLE_ROWS_SQL_NAME) + if (filterClause) { + baseConditions.push(filterClause) + } + + let matchingRowsQuery = db + .select({ + id: userTableRows.id, + data: userTableRows.data, + }) + .from(userTableRows) + .where(and(...baseConditions)) + + if (validated.limit) { + matchingRowsQuery = matchingRowsQuery.limit(validated.limit) as typeof matchingRowsQuery + } + + const matchingRows = await matchingRowsQuery + + if (matchingRows.length === 0) { + return NextResponse.json( + { + success: true, + data: { + message: 'No rows matched the filter criteria', + updatedCount: 0, + }, + }, + { status: 200 } + ) + } + + if (matchingRows.length > TABLE_LIMITS.MAX_BULK_OPERATION_SIZE) { + logger.warn(`[${requestId}] Updating ${matchingRows.length} rows. This may take some time.`) + } + + for (const row of matchingRows) { + const existingData = row.data as RowData + const mergedData = { ...existingData, ...updateData } + const rowValidation = validateRowAgainstSchema(mergedData, table.schema as TableSchema) + if (!rowValidation.valid) { + return NextResponse.json( + { + error: 'Updated data does not match schema', + details: rowValidation.errors, + affectedRowId: row.id, + }, + { status: 400 } + ) + } + } + + const uniqueColumns = getUniqueColumns(table.schema as TableSchema) + if (uniqueColumns.length > 0) { + // If updating multiple rows, check that updateData doesn't set any unique column + // (would cause all rows to have the same value, violating uniqueness) + if (matchingRows.length > 1) { + const uniqueColumnsInUpdate = uniqueColumns.filter((col) => col.name in updateData) + if (uniqueColumnsInUpdate.length > 0) { + return NextResponse.json( + { + error: 'Cannot set unique column values when updating multiple rows', + details: [ + `Columns with unique constraint: ${uniqueColumnsInUpdate.map((c) => c.name).join(', ')}. 
` + + `Updating ${matchingRows.length} rows with the same value would violate uniqueness.`, + ], + }, + { status: 400 } + ) + } + } + + // Check unique constraints against database for each row + for (const row of matchingRows) { + const existingData = row.data as RowData + const mergedData = { ...existingData, ...updateData } + const uniqueValidation = await checkUniqueConstraintsDb( + tableId, + mergedData, + table.schema as TableSchema, + row.id + ) + + if (!uniqueValidation.valid) { + return NextResponse.json( + { + error: 'Unique constraint violation', + details: uniqueValidation.errors, + affectedRowId: row.id, + }, + { status: 400 } + ) + } + } + } + + const now = new Date() + + await db.transaction(async (trx) => { + let totalUpdated = 0 + + for (let i = 0; i < matchingRows.length; i += TABLE_LIMITS.UPDATE_BATCH_SIZE) { + const batch = matchingRows.slice(i, i + TABLE_LIMITS.UPDATE_BATCH_SIZE) + const updatePromises = batch.map((row) => { + const existingData = row.data as RowData + return trx + .update(userTableRows) + .set({ + data: { ...existingData, ...updateData }, + updatedAt: now, + }) + .where(eq(userTableRows.id, row.id)) + }) + await Promise.all(updatePromises) + totalUpdated += batch.length + logger.info( + `[${requestId}] Updated batch ${Math.floor(i / TABLE_LIMITS.UPDATE_BATCH_SIZE) + 1} (${totalUpdated}/${matchingRows.length} rows)` + ) + } + }) + + logger.info(`[${requestId}] Updated ${matchingRows.length} rows in table ${tableId}`) + + return NextResponse.json({ + success: true, + data: { + message: 'Rows updated successfully', + updatedCount: matchingRows.length, + updatedRowIds: matchingRows.map((r) => r.id), + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error updating rows by filter:`, error) + + const errorMessage = error instanceof Error ? error.message : String(error) + const detailedError = `Failed to update rows: ${errorMessage}` + + return NextResponse.json({ error: detailedError }, { status: 500 }) + } +} + +/** DELETE /api/table/[tableId]/rows - Deletes rows matching filter criteria. */ +export async function DELETE(request: NextRequest, { params }: TableRowsRouteParams) { + const requestId = generateRequestId() + const { tableId } = await params + + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + const body: unknown = await request.json() + const validated = DeleteRowsRequestSchema.parse(body) + + const accessResult = await checkAccess(tableId, authResult.userId, 'write') + if (!accessResult.ok) return accessError(accessResult, requestId, tableId) + + const { table } = accessResult + + if (validated.workspaceId !== table.workspaceId) { + logger.warn( + `[${requestId}] Workspace ID mismatch for table ${tableId}. 
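The unique-column guard above exists because a filter-based update writes one `data` object to every match; with more than one match, any unique column in that object would be duplicated across rows. A hedged example of a bulk update that stays on the safe side:

```typescript
// Archive every pending row, capped at 100 matches. Putting a unique column
// (say `email`) into `data` would be rejected whenever the filter matches
// more than one row. Filter shape is assumed for illustration.
await fetch(`/api/table/tbl_example/rows`, {
  method: 'PUT',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    workspaceId: 'ws_example',
    filter: { status: 'pending' },
    data: { status: 'archived' },
    limit: 100,
  }),
})
```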
Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}` + ) + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const baseConditions = [ + eq(userTableRows.tableId, tableId), + eq(userTableRows.workspaceId, validated.workspaceId), + ] + + let rowIds: string[] = [] + let missingRowIds: string[] | undefined + let requestedCount: number | undefined + + if ('rowIds' in validated) { + const uniqueRequestedRowIds = Array.from(new Set(validated.rowIds)) + requestedCount = uniqueRequestedRowIds.length + + const matchingRows = await db + .select({ id: userTableRows.id }) + .from(userTableRows) + .where( + and( + ...baseConditions, + sql`${userTableRows.id} = ANY(ARRAY[${sql.join( + uniqueRequestedRowIds.map((id) => sql`${id}`), + sql`, ` + )}])` + ) + ) + + const matchedRowIds = matchingRows.map((r) => r.id) + const matchedIdSet = new Set(matchedRowIds) + missingRowIds = uniqueRequestedRowIds.filter((id) => !matchedIdSet.has(id)) + rowIds = matchedRowIds + } else { + const filterClause = buildFilterClause(validated.filter as Filter, USER_TABLE_ROWS_SQL_NAME) + if (filterClause) { + baseConditions.push(filterClause) + } + + let matchingRowsQuery = db + .select({ id: userTableRows.id }) + .from(userTableRows) + .where(and(...baseConditions)) + + if (validated.limit) { + matchingRowsQuery = matchingRowsQuery.limit(validated.limit) as typeof matchingRowsQuery + } + + const matchingRows = await matchingRowsQuery + rowIds = matchingRows.map((r) => r.id) + } + + if (rowIds.length === 0) { + return NextResponse.json( + { + success: true, + data: { + message: + 'rowIds' in validated + ? 'No matching rows found for the provided IDs' + : 'No rows matched the filter criteria', + deletedCount: 0, + deletedRowIds: [], + ...(requestedCount !== undefined ? { requestedCount } : {}), + ...(missingRowIds ? { missingRowIds } : {}), + }, + }, + { status: 200 } + ) + } + + if (rowIds.length > TABLE_LIMITS.DELETE_BATCH_SIZE) { + logger.warn(`[${requestId}] Deleting ${rowIds.length} rows. This may take some time.`) + } + + await db.transaction(async (trx) => { + let totalDeleted = 0 + + for (let i = 0; i < rowIds.length; i += TABLE_LIMITS.DELETE_BATCH_SIZE) { + const batch = rowIds.slice(i, i + TABLE_LIMITS.DELETE_BATCH_SIZE) + await trx.delete(userTableRows).where( + and( + eq(userTableRows.tableId, tableId), + eq(userTableRows.workspaceId, validated.workspaceId), + sql`${userTableRows.id} = ANY(ARRAY[${sql.join( + batch.map((id) => sql`${id}`), + sql`, ` + )}])` + ) + ) + totalDeleted += batch.length + logger.info( + `[${requestId}] Deleted batch ${Math.floor(i / TABLE_LIMITS.DELETE_BATCH_SIZE) + 1} (${totalDeleted}/${rowIds.length} rows)` + ) + } + }) + + logger.info(`[${requestId}] Deleted ${rowIds.length} rows from table ${tableId}`) + + return NextResponse.json({ + success: true, + data: { + message: 'Rows deleted successfully', + deletedCount: rowIds.length, + deletedRowIds: rowIds, + ...(requestedCount !== undefined ? { requestedCount } : {}), + ...(missingRowIds ? { missingRowIds } : {}), + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error deleting rows by filter:`, error) + + const errorMessage = error instanceof Error ? 
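A note on the `= ANY(ARRAY[...])` construction used in both branches above: `sql.join` renders every id as a bound parameter, so the statement stays fully parameterized no matter how large the batch is. Isolated, the pattern is:

```typescript
import { userTableRows } from '@sim/db/schema'
import { sql } from 'drizzle-orm'

// Each id becomes its own placeholder ($1, $2, ...) inside ANY(ARRAY[...]),
// so user-supplied ids are never interpolated into the SQL text.
const ids = ['row_abc', 'row_def', 'row_ghi']
const clause = sql`${userTableRows.id} = ANY(ARRAY[${sql.join(
  ids.map((id) => sql`${id}`),
  sql`, `
)}])`
```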
error.message : String(error) + const detailedError = `Failed to delete rows: ${errorMessage}` + + return NextResponse.json({ error: detailedError }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/table/[tableId]/rows/upsert/route.ts b/apps/sim/app/api/table/[tableId]/rows/upsert/route.ts new file mode 100644 index 0000000000..a7b9e81468 --- /dev/null +++ b/apps/sim/app/api/table/[tableId]/rows/upsert/route.ts @@ -0,0 +1,182 @@ +import { db } from '@sim/db' +import { userTableRows } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, eq, or, sql } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { generateRequestId } from '@/lib/core/utils/request' +import type { RowData, TableSchema } from '@/lib/table' +import { getUniqueColumns, validateRowData } from '@/lib/table' +import { accessError, checkAccess, verifyTableWorkspace } from '../../../utils' + +const logger = createLogger('TableUpsertAPI') + +const UpsertRowSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + data: z.record(z.unknown(), { required_error: 'Row data is required' }), +}) + +interface UpsertRouteParams { + params: Promise<{ tableId: string }> +} + +/** POST /api/table/[tableId]/rows/upsert - Inserts or updates based on unique columns. */ +export async function POST(request: NextRequest, { params }: UpsertRouteParams) { + const requestId = generateRequestId() + const { tableId } = await params + + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + const body: unknown = await request.json() + const validated = UpsertRowSchema.parse(body) + + const result = await checkAccess(tableId, authResult.userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + + const { table } = result + + const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId) + if (!isValidWorkspace) { + logger.warn( + `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}` + ) + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const schema = table.schema as TableSchema + const rowData = validated.data as RowData + + const validation = await validateRowData({ + rowData, + schema, + tableId, + checkUnique: false, + }) + if (!validation.valid) return validation.response + + const uniqueColumns = getUniqueColumns(schema) + + if (uniqueColumns.length === 0) { + return NextResponse.json( + { + error: + 'Upsert requires at least one unique column in the schema. 
Please add a unique constraint to a column or use insert instead.', + }, + { status: 400 } + ) + } + + const uniqueFilters = uniqueColumns.map((col) => { + const value = rowData[col.name] + if (value === undefined || value === null) { + return null + } + return sql`${userTableRows.data}->>${col.name} = ${String(value)}` + }) + + const validUniqueFilters = uniqueFilters.filter((f): f is Exclude => f !== null) + + if (validUniqueFilters.length === 0) { + return NextResponse.json( + { + error: `Upsert requires values for at least one unique field: ${uniqueColumns.map((c) => c.name).join(', ')}`, + }, + { status: 400 } + ) + } + + const [existingRow] = await db + .select() + .from(userTableRows) + .where( + and( + eq(userTableRows.tableId, tableId), + eq(userTableRows.workspaceId, validated.workspaceId), + or(...validUniqueFilters) + ) + ) + .limit(1) + + const now = new Date() + + if (!existingRow && table.rowCount >= table.maxRows) { + return NextResponse.json( + { error: `Table row limit reached (${table.maxRows} rows max)` }, + { status: 400 } + ) + } + + const upsertResult = await db.transaction(async (trx) => { + if (existingRow) { + const [updatedRow] = await trx + .update(userTableRows) + .set({ + data: validated.data, + updatedAt: now, + }) + .where(eq(userTableRows.id, existingRow.id)) + .returning() + + return { + row: updatedRow, + operation: 'update' as const, + } + } + + const [insertedRow] = await trx + .insert(userTableRows) + .values({ + id: `row_${crypto.randomUUID().replace(/-/g, '')}`, + tableId, + workspaceId: validated.workspaceId, + data: validated.data, + createdAt: now, + updatedAt: now, + createdBy: authResult.userId, + }) + .returning() + + return { + row: insertedRow, + operation: 'insert' as const, + } + }) + + logger.info( + `[${requestId}] Upserted (${upsertResult.operation}) row ${upsertResult.row.id} in table ${tableId}` + ) + + return NextResponse.json({ + success: true, + data: { + row: { + id: upsertResult.row.id, + data: upsertResult.row.data, + createdAt: upsertResult.row.createdAt.toISOString(), + updatedAt: upsertResult.row.updatedAt.toISOString(), + }, + operation: upsertResult.operation, + message: `Row ${upsertResult.operation === 'update' ? 'updated' : 'inserted'} successfully`, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error upserting row:`, error) + + const errorMessage = error instanceof Error ? 
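Upsert semantics in practice, assuming `email` is a unique column in the table schema: the first call inserts, a repeat with the same email updates in place, and `operation` in the response reports which branch ran.

```typescript
// Hypothetical two-call sequence against POST /api/table/[tableId]/rows/upsert.
const upsert = (data: Record<string, unknown>) =>
  fetch(`/api/table/tbl_example/rows/upsert`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ workspaceId: 'ws_example', data }),
  }).then((r) => r.json())

await upsert({ email: 'ada@example.com', plan: 'free' }) // data.operation === 'insert'
await upsert({ email: 'ada@example.com', plan: 'pro' }) // data.operation === 'update'
```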
error.message : String(error) + const detailedError = `Failed to upsert row: ${errorMessage}` + + return NextResponse.json({ error: detailedError }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/table/route.ts b/apps/sim/app/api/table/route.ts new file mode 100644 index 0000000000..cd9aa7ff3e --- /dev/null +++ b/apps/sim/app/api/table/route.ts @@ -0,0 +1,258 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { generateRequestId } from '@/lib/core/utils/request' +import { + canCreateTable, + createTable, + getWorkspaceTableLimits, + listTables, + TABLE_LIMITS, + type TableSchema, +} from '@/lib/table' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' +import { normalizeColumn } from './utils' + +const logger = createLogger('TableAPI') + +const ColumnSchema = z.object({ + name: z + .string() + .min(1, 'Column name is required') + .max( + TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH, + `Column name must be ${TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH} characters or less` + ) + .regex( + /^[a-z_][a-z0-9_]*$/i, + 'Column name must start with a letter or underscore and contain only alphanumeric characters and underscores' + ), + type: z.enum(['string', 'number', 'boolean', 'date', 'json'], { + errorMap: () => ({ + message: 'Column type must be one of: string, number, boolean, date, json', + }), + }), + required: z.boolean().optional().default(false), + unique: z.boolean().optional().default(false), +}) + +const CreateTableSchema = z.object({ + name: z + .string() + .min(1, 'Table name is required') + .max( + TABLE_LIMITS.MAX_TABLE_NAME_LENGTH, + `Table name must be ${TABLE_LIMITS.MAX_TABLE_NAME_LENGTH} characters or less` + ) + .regex( + /^[a-z_][a-z0-9_]*$/i, + 'Table name must start with a letter or underscore and contain only alphanumeric characters and underscores' + ), + description: z + .string() + .max( + TABLE_LIMITS.MAX_DESCRIPTION_LENGTH, + `Description must be ${TABLE_LIMITS.MAX_DESCRIPTION_LENGTH} characters or less` + ) + .optional(), + schema: z.object({ + columns: z + .array(ColumnSchema) + .min(1, 'Table must have at least one column') + .max( + TABLE_LIMITS.MAX_COLUMNS_PER_TABLE, + `Table cannot have more than ${TABLE_LIMITS.MAX_COLUMNS_PER_TABLE} columns` + ), + }), + workspaceId: z.string().min(1, 'Workspace ID is required'), +}) + +const ListTablesSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), +}) + +interface WorkspaceAccessResult { + hasAccess: boolean + canWrite: boolean +} + +async function checkWorkspaceAccess( + workspaceId: string, + userId: string +): Promise { + const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId) + + if (permission === null) { + return { hasAccess: false, canWrite: false } + } + + const canWrite = permission === 'admin' || permission === 'write' + return { hasAccess: true, canWrite } +} + +/** POST /api/table - Creates a new user-defined table. 
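The identifier rule shared by table and column names reduces to a single regex; a few cases it accepts and rejects:

```typescript
// Same pattern as the name validators above.
const identifier = /^[a-z_][a-z0-9_]*$/i

identifier.test('user_id') // true
identifier.test('_internal') // true
identifier.test('Email2') // true: the `i` flag admits uppercase
identifier.test('2fa') // false: must start with a letter or underscore
identifier.test('first-name') // false: hyphens are rejected
```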
*/ +export async function POST(request: NextRequest) { + const requestId = generateRequestId() + + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + const body: unknown = await request.json() + const params = CreateTableSchema.parse(body) + + const { hasAccess, canWrite } = await checkWorkspaceAccess( + params.workspaceId, + authResult.userId + ) + + if (!hasAccess || !canWrite) { + return NextResponse.json({ error: 'Access denied' }, { status: 403 }) + } + + // Check billing plan limits + const existingTables = await listTables(params.workspaceId) + const { canCreate, maxTables } = await canCreateTable(params.workspaceId, existingTables.length) + + if (!canCreate) { + return NextResponse.json( + { + error: `Workspace has reached the maximum table limit (${maxTables}) for your plan. Please upgrade to create more tables.`, + }, + { status: 403 } + ) + } + + // Get plan-based row limits + const planLimits = await getWorkspaceTableLimits(params.workspaceId) + const maxRowsPerTable = planLimits.maxRowsPerTable + + const normalizedSchema: TableSchema = { + columns: params.schema.columns.map(normalizeColumn), + } + + const table = await createTable( + { + name: params.name, + description: params.description, + schema: normalizedSchema, + workspaceId: params.workspaceId, + userId: authResult.userId, + maxRows: maxRowsPerTable, + }, + requestId + ) + + return NextResponse.json({ + success: true, + data: { + table: { + id: table.id, + name: table.name, + description: table.description, + schema: table.schema, + rowCount: table.rowCount, + maxRows: table.maxRows, + createdAt: + table.createdAt instanceof Date + ? table.createdAt.toISOString() + : String(table.createdAt), + updatedAt: + table.updatedAt instanceof Date + ? table.updatedAt.toISOString() + : String(table.updatedAt), + }, + message: 'Table created successfully', + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + if (error instanceof Error) { + if ( + error.message.includes('Invalid table name') || + error.message.includes('Invalid schema') || + error.message.includes('already exists') || + error.message.includes('maximum table limit') + ) { + return NextResponse.json({ error: error.message }, { status: 400 }) + } + } + + logger.error(`[${requestId}] Error creating table:`, error) + return NextResponse.json({ error: 'Failed to create table' }, { status: 500 }) + } +} + +/** GET /api/table - Lists all tables in a workspace. 
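A hypothetical request body this handler accepts; note that `maxRows` is not part of the payload, since it is derived from the workspace's plan limits.

```typescript
// Placeholder names throughout; shape follows CreateTableSchema above.
await fetch('/api/table', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    name: 'contacts',
    description: 'CRM contacts synced from workflows',
    workspaceId: 'ws_example',
    schema: {
      columns: [
        { name: 'email', type: 'string', required: true, unique: true },
        { name: 'age', type: 'number' },
      ],
    },
  }),
})
```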
*/ +export async function GET(request: NextRequest) { + const requestId = generateRequestId() + + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + const { searchParams } = new URL(request.url) + const workspaceId = searchParams.get('workspaceId') + + const validation = ListTablesSchema.safeParse({ workspaceId }) + if (!validation.success) { + return NextResponse.json( + { error: 'Validation error', details: validation.error.errors }, + { status: 400 } + ) + } + + const params = validation.data + + const { hasAccess } = await checkWorkspaceAccess(params.workspaceId, authResult.userId) + + if (!hasAccess) { + return NextResponse.json({ error: 'Access denied' }, { status: 403 }) + } + + const tables = await listTables(params.workspaceId) + + logger.info(`[${requestId}] Listed ${tables.length} tables in workspace ${params.workspaceId}`) + + return NextResponse.json({ + success: true, + data: { + tables: tables.map((t) => { + const schemaData = t.schema as TableSchema + return { + ...t, + schema: { + columns: schemaData.columns.map(normalizeColumn), + }, + createdAt: + t.createdAt instanceof Date ? t.createdAt.toISOString() : String(t.createdAt), + updatedAt: + t.updatedAt instanceof Date ? t.updatedAt.toISOString() : String(t.updatedAt), + } + }), + totalCount: tables.length, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error listing tables:`, error) + return NextResponse.json({ error: 'Failed to list tables' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/table/utils.ts b/apps/sim/app/api/table/utils.ts new file mode 100644 index 0000000000..5aee66315a --- /dev/null +++ b/apps/sim/app/api/table/utils.ts @@ -0,0 +1,164 @@ +import { createLogger } from '@sim/logger' +import { NextResponse } from 'next/server' +import type { ColumnDefinition, TableDefinition } from '@/lib/table' +import { getTableById } from '@/lib/table' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' + +const logger = createLogger('TableUtils') + +export interface TableAccessResult { + hasAccess: true + table: TableDefinition +} + +export interface TableAccessDenied { + hasAccess: false + notFound?: boolean + reason?: string +} + +export type TableAccessCheck = TableAccessResult | TableAccessDenied + +export type AccessResult = { ok: true; table: TableDefinition } | { ok: false; status: 404 | 403 } + +export interface ApiErrorResponse { + error: string + details?: unknown +} + +/** + * Check if a user has read access to a table. + * Read access requires any workspace permission (read, write, or admin). + */ +export async function checkTableAccess(tableId: string, userId: string): Promise { + const table = await getTableById(tableId) + + if (!table) { + return { hasAccess: false, notFound: true } + } + + const userPermission = await getUserEntityPermissions(userId, 'workspace', table.workspaceId) + if (userPermission !== null) { + return { hasAccess: true, table } + } + + return { hasAccess: false, reason: 'User does not have access to this table' } +} + +/** + * Check if a user has write access to a table. + * Write access requires write or admin workspace permission. 
+ */ +export async function checkTableWriteAccess( + tableId: string, + userId: string +): Promise { + const table = await getTableById(tableId) + + if (!table) { + return { hasAccess: false, notFound: true } + } + + const userPermission = await getUserEntityPermissions(userId, 'workspace', table.workspaceId) + if (userPermission === 'write' || userPermission === 'admin') { + return { hasAccess: true, table } + } + + return { hasAccess: false, reason: 'User does not have write access to this table' } +} + +/** + * Access check returning `{ ok, table }` or `{ ok: false, status }`. + * Uses workspace permissions only. + */ +export async function checkAccess( + tableId: string, + userId: string, + level: 'read' | 'write' | 'admin' = 'read' +): Promise { + const table = await getTableById(tableId) + + if (!table) { + return { ok: false, status: 404 } + } + + const permission = await getUserEntityPermissions(userId, 'workspace', table.workspaceId) + const hasAccess = + permission !== null && + (level === 'read' || + (level === 'write' && (permission === 'write' || permission === 'admin')) || + (level === 'admin' && permission === 'admin')) + + return hasAccess ? { ok: true, table } : { ok: false, status: 403 } +} + +export function accessError( + result: { ok: false; status: 404 | 403 }, + requestId: string, + context?: string +): NextResponse { + const message = result.status === 404 ? 'Table not found' : 'Access denied' + logger.warn(`[${requestId}] ${message}${context ? `: ${context}` : ''}`) + return NextResponse.json({ error: message }, { status: result.status }) +} + +/** + * Converts a TableAccessDenied result to an appropriate HTTP response. + * Use with checkTableAccess or checkTableWriteAccess. + */ +export function tableAccessError( + result: TableAccessDenied, + requestId: string, + context?: string +): NextResponse { + const status = result.notFound ? 404 : 403 + const message = result.notFound ? 'Table not found' : (result.reason ?? 'Access denied') + logger.warn(`[${requestId}] ${message}${context ? `: ${context}` : ''}`) + return NextResponse.json({ error: message }, { status }) +} + +export async function verifyTableWorkspace(tableId: string, workspaceId: string): Promise { + const table = await getTableById(tableId) + return table?.workspaceId === workspaceId +} + +export function errorResponse( + message: string, + status: number, + details?: unknown +): NextResponse { + const body: ApiErrorResponse = { error: message } + if (details !== undefined) { + body.details = details + } + return NextResponse.json(body, { status }) +} + +export function badRequestResponse(message: string, details?: unknown) { + return errorResponse(message, 400, details) +} + +export function unauthorizedResponse(message = 'Authentication required') { + return errorResponse(message, 401) +} + +export function forbiddenResponse(message = 'Access denied') { + return errorResponse(message, 403) +} + +export function notFoundResponse(message = 'Resource not found') { + return errorResponse(message, 404) +} + +export function serverErrorResponse(message = 'Internal server error') { + return errorResponse(message, 500) +} + +export function normalizeColumn(col: ColumnDefinition): ColumnDefinition { + return { + name: col.name, + type: col.type, + required: col.required ?? false, + unique: col.unique ?? 
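The `AccessResult` union above is what keeps the route code terse: after one `ok` check TypeScript narrows the type, so `table` is only reachable on the success branch. A sketch of the calling convention (the absolute import path is assumed; routes in this diff import these helpers relatively):

```typescript
import { NextResponse } from 'next/server'
import { accessError, checkAccess } from '@/app/api/table/utils' // path assumed

// One narrow, then typed access: result.table does not exist until ok is true.
async function withTable(tableId: string, userId: string, requestId: string) {
  const result = await checkAccess(tableId, userId, 'write')
  if (!result.ok) return accessError(result, requestId, tableId) // 404 or 403 JSON
  return NextResponse.json({ name: result.table.name })
}
```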
false, + } +} diff --git a/apps/sim/app/api/tools/custom/route.test.ts b/apps/sim/app/api/tools/custom/route.test.ts index a5db0632a3..15e26ba506 100644 --- a/apps/sim/app/api/tools/custom/route.test.ts +++ b/apps/sim/app/api/tools/custom/route.test.ts @@ -3,7 +3,7 @@ * * @vitest-environment node */ -import { createMockRequest, loggerMock } from '@sim/testing' +import { createMockRequest, loggerMock, mockHybridAuth } from '@sim/testing' import { NextRequest } from 'next/server' import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' @@ -180,13 +180,12 @@ describe('Custom Tools API Routes', () => { getSession: vi.fn().mockResolvedValue(mockSession), })) - vi.doMock('@/lib/auth/hybrid', () => ({ - checkSessionOrInternalAuth: vi.fn().mockResolvedValue({ - success: true, - userId: 'user-123', - authType: 'session', - }), - })) + const { mockCheckSessionOrInternalAuth: hybridAuthMock } = mockHybridAuth() + hybridAuthMock.mockResolvedValue({ + success: true, + userId: 'user-123', + authType: 'session', + }) vi.doMock('@/lib/workspaces/permissions/utils', () => ({ getUserEntityPermissions: vi.fn().mockResolvedValue('admin'), @@ -261,12 +260,11 @@ describe('Custom Tools API Routes', () => { 'http://localhost:3000/api/tools/custom?workspaceId=workspace-123' ) - vi.doMock('@/lib/auth/hybrid', () => ({ - checkSessionOrInternalAuth: vi.fn().mockResolvedValue({ - success: false, - error: 'Unauthorized', - }), - })) + const { mockCheckSessionOrInternalAuth: unauthMock } = mockHybridAuth() + unauthMock.mockResolvedValue({ + success: false, + error: 'Unauthorized', + }) const { GET } = await import('@/app/api/tools/custom/route') @@ -297,12 +295,11 @@ describe('Custom Tools API Routes', () => { */ describe('POST /api/tools/custom', () => { it('should reject unauthorized requests', async () => { - vi.doMock('@/lib/auth/hybrid', () => ({ - checkSessionOrInternalAuth: vi.fn().mockResolvedValue({ - success: false, - error: 'Unauthorized', - }), - })) + const { mockCheckSessionOrInternalAuth: unauthMock } = mockHybridAuth() + unauthMock.mockResolvedValue({ + success: false, + error: 'Unauthorized', + }) const req = createMockRequest('POST', { tools: [], workspaceId: 'workspace-123' }) @@ -384,13 +381,12 @@ describe('Custom Tools API Routes', () => { }) it('should prevent unauthorized deletion of user-scoped tool', async () => { - vi.doMock('@/lib/auth/hybrid', () => ({ - checkSessionOrInternalAuth: vi.fn().mockResolvedValue({ - success: true, - userId: 'user-456', - authType: 'session', - }), - })) + const { mockCheckSessionOrInternalAuth: diffUserMock } = mockHybridAuth() + diffUserMock.mockResolvedValue({ + success: true, + userId: 'user-456', + authType: 'session', + }) const userScopedTool = { ...sampleTools[0], workspaceId: null, userId: 'user-123' } const mockLimitUserScoped = vi.fn().mockResolvedValue([userScopedTool]) @@ -408,12 +404,11 @@ describe('Custom Tools API Routes', () => { }) it('should reject unauthorized requests', async () => { - vi.doMock('@/lib/auth/hybrid', () => ({ - checkSessionOrInternalAuth: vi.fn().mockResolvedValue({ - success: false, - error: 'Unauthorized', - }), - })) + const { mockCheckSessionOrInternalAuth: unauthMock } = mockHybridAuth() + unauthMock.mockResolvedValue({ + success: false, + error: 'Unauthorized', + }) const req = new NextRequest('http://localhost:3000/api/tools/custom?id=tool-1') diff --git a/apps/sim/app/api/tools/microsoft-dataverse/upload-file/route.ts b/apps/sim/app/api/tools/microsoft-dataverse/upload-file/route.ts new file mode 100644 
index 0000000000..003daa064a --- /dev/null +++ b/apps/sim/app/api/tools/microsoft-dataverse/upload-file/route.ts @@ -0,0 +1,145 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkInternalAuth } from '@/lib/auth/hybrid' +import { generateRequestId } from '@/lib/core/utils/request' +import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas' +import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils' +import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('DataverseUploadFileAPI') + +const DataverseUploadFileSchema = z.object({ + accessToken: z.string().min(1, 'Access token is required'), + environmentUrl: z.string().min(1, 'Environment URL is required'), + entitySetName: z.string().min(1, 'Entity set name is required'), + recordId: z.string().min(1, 'Record ID is required'), + fileColumn: z.string().min(1, 'File column is required'), + fileName: z.string().min(1, 'File name is required'), + file: RawFileInputSchema.optional().nullable(), + fileContent: z.string().optional().nullable(), +}) + +export async function POST(request: NextRequest) { + const requestId = generateRequestId() + + try { + const authResult = await checkInternalAuth(request, { requireWorkflowId: false }) + + if (!authResult.success) { + logger.warn(`[${requestId}] Unauthorized Dataverse upload attempt: ${authResult.error}`) + return NextResponse.json( + { success: false, error: authResult.error || 'Authentication required' }, + { status: 401 } + ) + } + + logger.info( + `[${requestId}] Authenticated Dataverse upload request via ${authResult.authType}`, + { + userId: authResult.userId, + } + ) + + const body = await request.json() + const validatedData = DataverseUploadFileSchema.parse(body) + + logger.info(`[${requestId}] Uploading file to Dataverse`, { + entitySetName: validatedData.entitySetName, + recordId: validatedData.recordId, + fileColumn: validatedData.fileColumn, + fileName: validatedData.fileName, + hasFile: !!validatedData.file, + hasFileContent: !!validatedData.fileContent, + }) + + let fileBuffer: Buffer + + if (validatedData.file) { + const rawFile = validatedData.file + logger.info(`[${requestId}] Processing UserFile upload: ${rawFile.name}`) + + let userFile + try { + userFile = processSingleFileToUserFile(rawFile, requestId, logger) + } catch (error) { + return NextResponse.json( + { + success: false, + error: error instanceof Error ? 
error.message : 'Failed to process file', + }, + { status: 400 } + ) + } + + fileBuffer = await downloadFileFromStorage(userFile, requestId, logger) + } else if (validatedData.fileContent) { + fileBuffer = Buffer.from(validatedData.fileContent, 'base64') + } else { + return NextResponse.json( + { success: false, error: 'Either file or fileContent must be provided' }, + { status: 400 } + ) + } + + const baseUrl = validatedData.environmentUrl.replace(/\/$/, '') + const uploadUrl = `${baseUrl}/api/data/v9.2/${validatedData.entitySetName}(${validatedData.recordId})/${validatedData.fileColumn}` + + const response = await fetch(uploadUrl, { + method: 'PATCH', + headers: { + Authorization: `Bearer ${validatedData.accessToken}`, + 'Content-Type': 'application/octet-stream', + 'OData-MaxVersion': '4.0', + 'OData-Version': '4.0', + 'x-ms-file-name': validatedData.fileName, + }, + body: new Uint8Array(fileBuffer), + }) + + if (!response.ok) { + const errorData = await response.json().catch(() => ({})) + const errorMessage = + errorData?.error?.message ?? + `Dataverse API error: ${response.status} ${response.statusText}` + logger.error(`[${requestId}] Dataverse upload file failed`, { + errorData, + status: response.status, + }) + return NextResponse.json({ success: false, error: errorMessage }, { status: response.status }) + } + + logger.info(`[${requestId}] File uploaded to Dataverse successfully`, { + entitySetName: validatedData.entitySetName, + recordId: validatedData.recordId, + fileColumn: validatedData.fileColumn, + }) + + return NextResponse.json({ + success: true, + output: { + recordId: validatedData.recordId, + fileColumn: validatedData.fileColumn, + fileName: validatedData.fileName, + success: true, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors }) + return NextResponse.json( + { success: false, error: 'Invalid request data', details: error.errors }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error uploading file to Dataverse:`, error) + + return NextResponse.json( + { success: false, error: error instanceof Error ? 
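A hedged sketch of invoking this internal endpoint over the base64 path; every Dataverse identifier below is a placeholder, not a real entity, column, or record.

```typescript
// fileContent carries raw bytes as base64 when no stored UserFile is available.
const pdfBytes = new Uint8Array([0x25, 0x50, 0x44, 0x46]) // '%PDF', stand-in content
await fetch('/api/tools/microsoft-dataverse/upload-file', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    accessToken: process.env.DATAVERSE_TOKEN, // assumed to hold a valid OAuth token
    environmentUrl: 'https://org.crm.dynamics.com', // placeholder environment
    entitySetName: 'accounts',
    recordId: '00000000-0000-0000-0000-000000000000',
    fileColumn: 'new_contractfile', // placeholder file column
    fileName: 'contract.pdf',
    fileContent: Buffer.from(pdfBytes).toString('base64'),
  }),
})
```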
error.message : 'Internal server error' }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/tools/redis/execute/route.ts b/apps/sim/app/api/tools/redis/execute/route.ts new file mode 100644 index 0000000000..7574c8506d --- /dev/null +++ b/apps/sim/app/api/tools/redis/execute/route.ts @@ -0,0 +1,57 @@ +import { createLogger } from '@sim/logger' +import Redis from 'ioredis' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkInternalAuth } from '@/lib/auth/hybrid' + +const logger = createLogger('RedisAPI') + +const RequestSchema = z.object({ + url: z.string().min(1, 'Redis connection URL is required'), + command: z.string().min(1, 'Redis command is required'), + args: z.array(z.union([z.string(), z.number()])).default([]), +}) + +export async function POST(request: NextRequest) { + let client: Redis | null = null + + try { + const auth = await checkInternalAuth(request) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 }) + } + + const body = await request.json() + const { url, command, args } = RequestSchema.parse(body) + + client = new Redis(url, { + connectTimeout: 10000, + commandTimeout: 10000, + maxRetriesPerRequest: 1, + lazyConnect: true, + }) + + await client.connect() + + const cmd = command.toUpperCase() + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const result = await (client as any).call(cmd, ...args) + + await client.quit() + client = null + + return NextResponse.json({ result }) + } catch (error) { + logger.error('Redis command failed', { error }) + const errorMessage = error instanceof Error ? error.message : 'Redis command failed' + return NextResponse.json({ error: errorMessage }, { status: 500 }) + } finally { + if (client) { + try { + await client.quit() + } catch { + client.disconnect() + } + } + } +} diff --git a/apps/sim/app/api/tools/stt/route.ts b/apps/sim/app/api/tools/stt/route.ts index bb5de0310d..2d18e19935 100644 --- a/apps/sim/app/api/tools/stt/route.ts +++ b/apps/sim/app/api/tools/stt/route.ts @@ -766,7 +766,7 @@ async function transcribeWithGemini( const error = await response.json() if (response.status === 404) { throw new Error( - `Model not found: ${modelName}. Use gemini-3-pro-preview, gemini-2.5-pro, gemini-2.5-flash, gemini-2.5-flash-lite, or gemini-2.0-flash-exp` + `Model not found: ${modelName}. 
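Because the route dispatches through ioredis's generic `call`, any command name round-trips unchanged; a usage sketch with a SET/GET pair (the localhost URL is a placeholder):

```typescript
// Thin caller for POST /api/tools/redis/execute.
const exec = (command: string, args: (string | number)[]) =>
  fetch('/api/tools/redis/execute', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ url: 'redis://localhost:6379', command, args }),
  }).then((r) => r.json())

await exec('SET', ['greeting', 'hello'])
const { result } = await exec('GET', ['greeting']) // result === 'hello'
```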
Use gemini-3.1-pro-preview, gemini-3-pro-preview, gemini-2.5-pro, gemini-2.5-flash, gemini-2.5-flash-lite, or gemini-2.0-flash-exp`
       )
     }
     const errorMessage = error.error?.message || JSON.stringify(error)
diff --git a/apps/sim/app/api/wand/route.ts b/apps/sim/app/api/wand/route.ts
index f6089ff1d2..2bd65fa0ff 100644
--- a/apps/sim/app/api/wand/route.ts
+++ b/apps/sim/app/api/wand/route.ts
@@ -10,6 +10,7 @@ import { checkAndBillOverageThreshold } from '@/lib/billing/threshold-billing'
 import { env } from '@/lib/core/config/env'
 import { getCostMultiplier, isBillingEnabled } from '@/lib/core/config/feature-flags'
 import { generateRequestId } from '@/lib/core/utils/request'
+import { enrichTableSchema } from '@/lib/table/llm/wand'
 import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
 import { extractResponseText, parseResponsesUsage } from '@/providers/openai/utils'
 import { getModelPricing } from '@/providers/utils'
@@ -48,6 +49,7 @@ interface RequestBody {
   history?: ChatMessage[]
   workflowId?: string
   generationType?: string
+  wandContext?: Record<string, unknown>
 }
 
 function safeStringify(value: unknown): string {
@@ -58,6 +60,38 @@
 }
 
+/**
+ * Wand enricher function type.
+ * Enrichers add context to the system prompt based on generationType.
+ */
+type WandEnricher = (
+  workspaceId: string | null,
+  context: Record<string, unknown>
+) => Promise<string | null>
+
+/**
+ * Registry of wand enrichers by generationType.
+ * Each enricher returns additional context to append to the system prompt.
+ */
+const wandEnrichers: Partial<Record<string, WandEnricher>> = {
+  timestamp: async () => {
+    const now = new Date()
+    return `Current date and time context for reference:
+- Current UTC timestamp: ${now.toISOString()}
+- Current Unix timestamp (seconds): ${Math.floor(now.getTime() / 1000)}
+- Current Unix timestamp (milliseconds): ${now.getTime()}
+- Current date (UTC): ${now.toISOString().split('T')[0]}
+- Current year: ${now.getUTCFullYear()}
+- Current month: ${now.getUTCMonth() + 1}
+- Current day of month: ${now.getUTCDate()}
+- Current day of week: ${['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'][now.getUTCDay()]}
+
+Use this context to calculate relative dates like "yesterday", "last week", "beginning of this month", etc.`
+  },
+
+  'table-schema': enrichTableSchema,
+}
+
 async function updateUserStatsForWand(
   userId: string,
   usage: {
@@ -147,7 +181,15 @@ export async function POST(req: NextRequest) {
   try {
     const body = (await req.json()) as RequestBody
 
-    const { prompt, systemPrompt, stream = false, history = [], workflowId, generationType } = body
+    const {
+      prompt,
+      systemPrompt,
+      stream = false,
+      history = [],
+      workflowId,
+      generationType,
+      wandContext = {},
+    } = body
 
     if (!prompt) {
       logger.warn(`[${requestId}] Invalid request: Missing prompt.`)
@@ -222,20 +264,15 @@
       systemPrompt ||
       'You are a helpful AI assistant. Generate content exactly as requested by the user.'
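The registry makes new generation types additive. A minimal sketch of a hypothetical enricher (the name and the `locale` context key are invented for illustration); returning `null` leaves the system prompt untouched, since the caller only appends truthy enrichments:

```typescript
// Hypothetical entry alongside `timestamp` and 'table-schema'; assumes access
// to the module-level wandEnrichers registry defined above.
wandEnrichers['workspace-locale'] = async (_workspaceId, context) => {
  const locale = typeof context.locale === 'string' ? context.locale : null
  if (!locale) return null // falsy enrichment is skipped by the caller
  return `The user's workspace locale is ${locale}. Format dates and numbers accordingly.`
}
```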
- if (generationType === 'timestamp') { - const now = new Date() - const currentTimeContext = `\n\nCurrent date and time context for reference: -- Current UTC timestamp: ${now.toISOString()} -- Current Unix timestamp (seconds): ${Math.floor(now.getTime() / 1000)} -- Current Unix timestamp (milliseconds): ${now.getTime()} -- Current date (UTC): ${now.toISOString().split('T')[0]} -- Current year: ${now.getUTCFullYear()} -- Current month: ${now.getUTCMonth() + 1} -- Current day of month: ${now.getUTCDate()} -- Current day of week: ${['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'][now.getUTCDay()]} - -Use this context to calculate relative dates like "yesterday", "last week", "beginning of this month", etc.` - finalSystemPrompt += currentTimeContext + // Apply enricher if one exists for this generationType + if (generationType) { + const enricher = wandEnrichers[generationType] + if (enricher) { + const enrichment = await enricher(workspaceId, wandContext) + if (enrichment) { + finalSystemPrompt += `\n\n${enrichment}` + } + } } if (generationType === 'cron-expression') { diff --git a/apps/sim/app/api/webhooks/trigger/[path]/route.test.ts b/apps/sim/app/api/webhooks/trigger/[path]/route.test.ts index 737e5ac48b..97cabebf61 100644 --- a/apps/sim/app/api/webhooks/trigger/[path]/route.test.ts +++ b/apps/sim/app/api/webhooks/trigger/[path]/route.test.ts @@ -3,7 +3,7 @@ * * @vitest-environment node */ -import { createMockRequest, loggerMock } from '@sim/testing' +import { createMockRequest, loggerMock, requestUtilsMock } from '@sim/testing' import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' /** Mock execution dependencies for webhook tests */ @@ -348,9 +348,7 @@ vi.mock('postgres', () => vi.fn().mockReturnValue({})) vi.mock('@sim/logger', () => loggerMock) -vi.mock('@/lib/core/utils/request', () => ({ - generateRequestId: vi.fn().mockReturnValue('test-request-id'), -})) +vi.mock('@/lib/core/utils/request', () => requestUtilsMock) process.env.DATABASE_URL = 'postgresql://test:test@localhost:5432/test' diff --git a/apps/sim/app/api/workflows/[id]/chat/status/route.test.ts b/apps/sim/app/api/workflows/[id]/chat/status/route.test.ts index 8e09e10b0f..1d3df876cb 100644 --- a/apps/sim/app/api/workflows/[id]/chat/status/route.test.ts +++ b/apps/sim/app/api/workflows/[id]/chat/status/route.test.ts @@ -3,11 +3,11 @@ * * @vitest-environment node */ -import { loggerMock } from '@sim/testing' +import { loggerMock, mockHybridAuth } from '@sim/testing' import { NextRequest } from 'next/server' import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' -const mockCheckSessionOrInternalAuth = vi.fn() +let mockCheckSessionOrInternalAuth: ReturnType const mockAuthorizeWorkflowByWorkspacePermission = vi.fn() const mockDbSelect = vi.fn() const mockDbFrom = vi.fn() @@ -48,9 +48,7 @@ describe('Workflow Chat Status Route', () => { workflowId: 'workflowId', }, })) - vi.doMock('@/lib/auth/hybrid', () => ({ - checkSessionOrInternalAuth: mockCheckSessionOrInternalAuth, - })) + ;({ mockCheckSessionOrInternalAuth } = mockHybridAuth()) vi.doMock('@/lib/workflows/utils', () => ({ authorizeWorkflowByWorkspacePermission: mockAuthorizeWorkflowByWorkspacePermission, })) diff --git a/apps/sim/app/api/workflows/[id]/form/status/route.test.ts b/apps/sim/app/api/workflows/[id]/form/status/route.test.ts index d4274a6faf..4ab4b2a5dc 100644 --- a/apps/sim/app/api/workflows/[id]/form/status/route.test.ts +++ b/apps/sim/app/api/workflows/[id]/form/status/route.test.ts @@ -3,11 
diff --git a/apps/sim/app/api/workflows/[id]/chat/status/route.test.ts b/apps/sim/app/api/workflows/[id]/chat/status/route.test.ts
index 8e09e10b0f..1d3df876cb 100644
--- a/apps/sim/app/api/workflows/[id]/chat/status/route.test.ts
+++ b/apps/sim/app/api/workflows/[id]/chat/status/route.test.ts
@@ -3,11 +3,11 @@
  *
  * @vitest-environment node
  */
-import { loggerMock } from '@sim/testing'
+import { loggerMock, mockHybridAuth } from '@sim/testing'
 import { NextRequest } from 'next/server'
 import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
 
-const mockCheckSessionOrInternalAuth = vi.fn()
+let mockCheckSessionOrInternalAuth: ReturnType<typeof vi.fn>
 const mockAuthorizeWorkflowByWorkspacePermission = vi.fn()
 const mockDbSelect = vi.fn()
 const mockDbFrom = vi.fn()
@@ -48,9 +48,7 @@ describe('Workflow Chat Status Route', () => {
       workflowId: 'workflowId',
     },
   }))
-  vi.doMock('@/lib/auth/hybrid', () => ({
-    checkSessionOrInternalAuth: mockCheckSessionOrInternalAuth,
-  }))
+  ;({ mockCheckSessionOrInternalAuth } = mockHybridAuth())
   vi.doMock('@/lib/workflows/utils', () => ({
     authorizeWorkflowByWorkspacePermission: mockAuthorizeWorkflowByWorkspacePermission,
   }))
diff --git a/apps/sim/app/api/workflows/[id]/form/status/route.test.ts b/apps/sim/app/api/workflows/[id]/form/status/route.test.ts
index d4274a6faf..4ab4b2a5dc 100644
--- a/apps/sim/app/api/workflows/[id]/form/status/route.test.ts
+++ b/apps/sim/app/api/workflows/[id]/form/status/route.test.ts
@@ -3,11 +3,11 @@
  *
  * @vitest-environment node
  */
-import { loggerMock } from '@sim/testing'
+import { loggerMock, mockHybridAuth } from '@sim/testing'
 import { NextRequest } from 'next/server'
 import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
 
-const mockCheckSessionOrInternalAuth = vi.fn()
+let mockCheckSessionOrInternalAuth: ReturnType<typeof vi.fn>
 const mockAuthorizeWorkflowByWorkspacePermission = vi.fn()
 const mockDbSelect = vi.fn()
 const mockDbFrom = vi.fn()
@@ -43,9 +43,7 @@ describe('Workflow Form Status Route', () => {
       isActive: 'isActive',
     },
   }))
-  vi.doMock('@/lib/auth/hybrid', () => ({
-    checkSessionOrInternalAuth: mockCheckSessionOrInternalAuth,
-  }))
+  ;({ mockCheckSessionOrInternalAuth } = mockHybridAuth())
   vi.doMock('@/lib/workflows/utils', () => ({
     authorizeWorkflowByWorkspacePermission: mockAuthorizeWorkflowByWorkspacePermission,
   }))
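Both status tests above now pull `mockHybridAuth` from `@sim/testing` instead of hand-rolling the `vi.doMock`. A plausible sketch of that helper, inferred purely from how the tests consume it — the real implementation in `@sim/testing` may differ:

```typescript
import { vi } from 'vitest'

// Sketch only: registers the hybrid-auth module mock and hands the
// underlying vi.fn() back so each test can set its own resolved auth state.
export function mockHybridAuth() {
  const mockCheckSessionOrInternalAuth = vi.fn()
  vi.doMock('@/lib/auth/hybrid', () => ({
    checkSessionOrInternalAuth: (...args: unknown[]) => mockCheckSessionOrInternalAuth(...args),
  }))
  return { mockCheckSessionOrInternalAuth }
}
```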
diff --git a/apps/sim/app/api/workflows/[id]/route.test.ts b/apps/sim/app/api/workflows/[id]/route.test.ts
index 3595a26850..fba05c92c9 100644
--- a/apps/sim/app/api/workflows/[id]/route.test.ts
+++ b/apps/sim/app/api/workflows/[id]/route.test.ts
@@ -5,11 +5,19 @@
  *
  * @vitest-environment node
  */
-import { auditMock, loggerMock, setupGlobalFetchMock } from '@sim/testing'
+import {
+  auditMock,
+  envMock,
+  loggerMock,
+  requestUtilsMock,
+  setupGlobalFetchMock,
+  telemetryMock,
+} from '@sim/testing'
 import { NextRequest } from 'next/server'
 import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
 
-const mockGetSession = vi.fn()
+const mockCheckHybridAuth = vi.fn()
+const mockCheckSessionOrInternalAuth = vi.fn()
 const mockLoadWorkflowFromNormalizedTables = vi.fn()
 const mockGetWorkflowById = vi.fn()
 const mockAuthorizeWorkflowByWorkspacePermission = vi.fn()
@@ -17,10 +25,34 @@
 const mockDbDelete = vi.fn()
 const mockDbUpdate = vi.fn()
 const mockDbSelect = vi.fn()
 
+/**
+ * Helper to set mock auth state consistently across getSession and hybrid auth.
+ */
+function mockGetSession(session: { user: { id: string } } | null) {
+  if (session) {
+    mockCheckHybridAuth.mockResolvedValue({ success: true, userId: session.user.id })
+    mockCheckSessionOrInternalAuth.mockResolvedValue({ success: true, userId: session.user.id })
+  } else {
+    mockCheckHybridAuth.mockResolvedValue({ success: false })
+    mockCheckSessionOrInternalAuth.mockResolvedValue({ success: false })
+  }
+}
+
 vi.mock('@/lib/auth', () => ({
-  getSession: () => mockGetSession(),
+  getSession: vi.fn(),
 }))
 
+vi.mock('@/lib/auth/hybrid', () => ({
+  checkHybridAuth: (...args: unknown[]) => mockCheckHybridAuth(...args),
+  checkSessionOrInternalAuth: (...args: unknown[]) => mockCheckSessionOrInternalAuth(...args),
+}))
+
+vi.mock('@/lib/core/config/env', () => envMock)
+
+vi.mock('@/lib/core/telemetry', () => telemetryMock)
+
+vi.mock('@/lib/core/utils/request', () => requestUtilsMock)
+
 vi.mock('@sim/logger', () => loggerMock)
 
 vi.mock('@/lib/audit/log', () => auditMock)
@@ -30,20 +62,14 @@ vi.mock('@/lib/workflows/persistence/utils', () => ({
     mockLoadWorkflowFromNormalizedTables(workflowId),
 }))
 
-vi.mock('@/lib/workflows/utils', async () => {
-  const actual =
-    await vi.importActual('@/lib/workflows/utils')
-
-  return {
-    ...actual,
-    getWorkflowById: (workflowId: string) => mockGetWorkflowById(workflowId),
-    authorizeWorkflowByWorkspacePermission: (params: {
-      workflowId: string
-      userId: string
-      action?: 'read' | 'write' | 'admin'
-    }) => mockAuthorizeWorkflowByWorkspacePermission(params),
-  }
-})
+vi.mock('@/lib/workflows/utils', () => ({
+  getWorkflowById: (workflowId: string) => mockGetWorkflowById(workflowId),
+  authorizeWorkflowByWorkspacePermission: (params: {
+    workflowId: string
+    userId: string
+    action?: 'read' | 'write' | 'admin'
+  }) => mockAuthorizeWorkflowByWorkspacePermission(params),
+}))
 
 vi.mock('@sim/db', () => ({
   db: {
@@ -73,7 +99,7 @@ describe('Workflow By ID API Route', () => {
   describe('GET /api/workflows/[id]', () => {
     it('should return 401 when user is not authenticated', async () => {
-      mockGetSession.mockResolvedValue(null)
+      mockGetSession(null)
 
       const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123')
       const params = Promise.resolve({ id: 'workflow-123' })
@@ -86,9 +112,7 @@
     })
 
     it('should return 404 when workflow does not exist', async () => {
-      mockGetSession.mockResolvedValue({
-        user: { id: 'user-123' },
-      })
+      mockGetSession({ user: { id: 'user-123' } })
 
      mockGetWorkflowById.mockResolvedValue(null)
 
@@ -118,9 +142,7 @@
       isFromNormalizedTables: true,
     }
 
-      mockGetSession.mockResolvedValue({
-        user: { id: 'user-123' },
-      })
+      mockGetSession({ user: { id: 'user-123' } })
 
       mockGetWorkflowById.mockResolvedValue(mockWorkflow)
 
       mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
@@ -158,9 +180,7 @@
       isFromNormalizedTables: true,
     }
 
-      mockGetSession.mockResolvedValue({
-        user: { id: 'user-123' },
-      })
+      mockGetSession({ user: { id: 'user-123' } })
 
       mockGetWorkflowById.mockResolvedValue(mockWorkflow)
 
       mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
@@ -190,9 +210,7 @@
       workspaceId: 'workspace-456',
     }
 
-      mockGetSession.mockResolvedValue({
-        user: { id: 'user-123' },
-      })
+      mockGetSession({ user: { id: 'user-123' } })
 
       mockGetWorkflowById.mockResolvedValue(mockWorkflow)
 
       mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
@@ -229,9 +247,7 @@ describe('Workflow By ID API Route', () => {
       isFromNormalizedTables: true,
     }
 
-      mockGetSession.mockResolvedValue({
-        user: { id: 'user-123' },
-      })
+      mockGetSession({ user: { id: 'user-123' } })
 
       mockGetWorkflowById.mockResolvedValue(mockWorkflow)
 
       mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
@@ -264,9 +280,7 @@ describe('Workflow By ID API Route', () => {
       workspaceId: 'workspace-456',
     }
 
-      mockGetSession.mockResolvedValue({
-        user: { id: 'user-123' },
-      })
+      mockGetSession({ user: { id: 'user-123' } })
 
       mockGetWorkflowById.mockResolvedValue(mockWorkflow)
 
       mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
@@ -308,9 +322,7 @@ describe('Workflow By ID API Route', () => {
       workspaceId: 'workspace-456',
     }
 
-      mockGetSession.mockResolvedValue({
-        user: { id: 'user-123' },
-      })
+      mockGetSession({ user: { id: 'user-123' } })
 
       mockGetWorkflowById.mockResolvedValue(mockWorkflow)
 
       mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
@@ -353,9 +365,7 @@ describe('Workflow By ID API Route', () => {
       workspaceId: 'workspace-456',
     }
 
-      mockGetSession.mockResolvedValue({
-        user: { id: 'user-123' },
-      })
+      mockGetSession({ user: { id: 'user-123' } })
 
       mockGetWorkflowById.mockResolvedValue(mockWorkflow)
 
       mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
@@ -392,9 +402,7 @@ describe('Workflow By ID API Route', () => {
       workspaceId: 'workspace-456',
     }
 
-      mockGetSession.mockResolvedValue({
-        user: { id: 'user-123' },
-      })
+      mockGetSession({ user: { id: 'user-123' } })
 
       mockGetWorkflowById.mockResolvedValue(mockWorkflow)
 
       mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
@@ -419,6 +427,16 @@ describe('Workflow By ID API Route', () => {
   })
 
   describe('PUT /api/workflows/[id]', () => {
+    function mockDuplicateCheck(results: Array<{ id: string }> = []) {
+      mockDbSelect.mockReturnValue({
+        from: vi.fn().mockReturnValue({
+          where: vi.fn().mockReturnValue({
+            limit: vi.fn().mockResolvedValue(results),
+          }),
+        }),
+      })
+    }
+
     it('should allow user with write permission to update workflow', async () => {
       const mockWorkflow = {
         id: 'workflow-123',
@@ -430,9 +448,7 @@ describe('Workflow By ID API Route', () => {
       const updateData = { name: 'Updated Workflow' }
       const updatedWorkflow = { ...mockWorkflow, ...updateData, updatedAt: new Date() }
 
-      mockGetSession.mockResolvedValue({
-        user: { id: 'user-123' },
-      })
+      mockGetSession({ user: { id: 'user-123' } })
 
       mockGetWorkflowById.mockResolvedValue(mockWorkflow)
 
       mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
@@ -442,6 +458,8 @@ describe('Workflow By ID API Route', () => {
         workspacePermission: 'write',
       })
 
+      mockDuplicateCheck([])
+
       mockDbUpdate.mockReturnValue({
         set: vi.fn().mockReturnValue({
           where: vi.fn().mockReturnValue({
@@ -474,9 +492,7 @@ describe('Workflow By ID API Route', () => {
       const updateData = { name: 'Updated Workflow' }
       const updatedWorkflow = { ...mockWorkflow, ...updateData, updatedAt: new Date() }
 
-      mockGetSession.mockResolvedValue({
-        user: { id: 'user-123' },
-      })
+      mockGetSession({ user: { id: 'user-123' } })
 
       mockGetWorkflowById.mockResolvedValue(mockWorkflow)
 
       mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
@@ -486,6 +502,8 @@ describe('Workflow By ID API Route', () => {
         workspacePermission: 'write',
       })
 
+      mockDuplicateCheck([])
+
       mockDbUpdate.mockReturnValue({
         set: vi.fn().mockReturnValue({
           where: vi.fn().mockReturnValue({
@@ -517,9 +535,7 @@ describe('Workflow By ID API Route', () => {
       const updateData = { name: 'Updated Workflow' }
 
-      mockGetSession.mockResolvedValue({
-        user: { id: 'user-123' },
-      })
+      mockGetSession({ user: { id: 'user-123' } })
 
       mockGetWorkflowById.mockResolvedValue(mockWorkflow)
 
       mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
@@ -551,9 +567,7 @@ describe('Workflow By ID API Route', () => {
       workspaceId: 'workspace-456',
     }
 
-      mockGetSession.mockResolvedValue({
-        user: { id: 'user-123' },
-      })
+      mockGetSession({ user: { id: 'user-123' } })
 
       mockGetWorkflowById.mockResolvedValue(mockWorkflow)
 
       mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
@@ -577,13 +591,238 @@ describe('Workflow By ID API Route', () => {
       const data = await response.json()
       expect(data.error).toBe('Invalid request data')
     })
+
+    it('should reject rename when duplicate name exists in same folder', async () => {
+      const mockWorkflow = {
+        id: 'workflow-123',
+        userId: 'user-123',
+        name: 'Original Name',
+        folderId: 'folder-1',
+        workspaceId: 'workspace-456',
+      }
+
+      mockGetSession({ user: { id: 'user-123' } })
+      mockGetWorkflowById.mockResolvedValue(mockWorkflow)
+      mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
+        allowed: true,
+        status: 200,
+        workflow: mockWorkflow,
+        workspacePermission: 'write',
+      })
+
+      mockDuplicateCheck([{ id: 'workflow-other' }])
+
+      const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123', {
+        method: 'PUT',
+        body: JSON.stringify({ name: 'Duplicate Name' }),
+      })
+      const params = Promise.resolve({ id: 'workflow-123' })
+
+      const response = await PUT(req, { params })
+
+      expect(response.status).toBe(409)
+      const data = await response.json()
+      expect(data.error).toBe('A workflow named "Duplicate Name" already exists in this folder')
+    })
+
+    it('should reject rename when duplicate name exists at root level', async () => {
+      const mockWorkflow = {
+        id: 'workflow-123',
+        userId: 'user-123',
+        name: 'Original Name',
+        folderId: null,
+        workspaceId: 'workspace-456',
+      }
+
+      mockGetSession({ user: { id: 'user-123' } })
+      mockGetWorkflowById.mockResolvedValue(mockWorkflow)
+      mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
+        allowed: true,
+        status: 200,
+        workflow: mockWorkflow,
+        workspacePermission: 'write',
+      })
+
+      mockDuplicateCheck([{ id: 'workflow-other' }])
+
+      const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123', {
+        method: 'PUT',
+        body: JSON.stringify({ name: 'Duplicate Name' }),
+      })
+      const params = Promise.resolve({ id: 'workflow-123' })
+
+      const response = await PUT(req, { params })
+
+      expect(response.status).toBe(409)
+      const data = await response.json()
+      expect(data.error).toBe('A workflow named "Duplicate Name" already exists in this folder')
+    })
+
+    it('should allow rename when no duplicate exists in same folder', async () => {
+      const mockWorkflow = {
+        id: 'workflow-123',
+        userId: 'user-123',
+        name: 'Original Name',
+        folderId: 'folder-1',
+        workspaceId: 'workspace-456',
+      }
+
+      const updatedWorkflow = { ...mockWorkflow, name: 'Unique Name', updatedAt: new Date() }
+
+      mockGetSession({ user: { id: 'user-123' } })
+      mockGetWorkflowById.mockResolvedValue(mockWorkflow)
+      mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
+        allowed: true,
+        status: 200,
+        workflow: mockWorkflow,
+        workspacePermission: 'write',
+      })
+
+      mockDuplicateCheck([])
+
+      mockDbUpdate.mockReturnValue({
+        set: vi.fn().mockReturnValue({
+          where: vi.fn().mockReturnValue({
+            returning: vi.fn().mockResolvedValue([updatedWorkflow]),
+          }),
+        }),
+      })
+
+      const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123', {
+        method: 'PUT',
+        body: JSON.stringify({ name: 'Unique Name' }),
+      })
+      const params = Promise.resolve({ id: 'workflow-123' })
+
+      const response = await PUT(req, { params })
+
+      expect(response.status).toBe(200)
+      const data = await response.json()
+      expect(data.workflow.name).toBe('Unique Name')
+    })
+
+    it('should allow same name in different folders', async () => {
+      const mockWorkflow = {
+        id: 'workflow-123',
+        userId: 'user-123',
+        name: 'My Workflow',
+        folderId: 'folder-1',
+        workspaceId: 'workspace-456',
+      }
+
+      const updatedWorkflow = { ...mockWorkflow, folderId: 'folder-2', updatedAt: new Date() }
+
+      mockGetSession({ user: { id: 'user-123' } })
+      mockGetWorkflowById.mockResolvedValue(mockWorkflow)
+      mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
+        allowed: true,
+        status: 200,
+        workflow: mockWorkflow,
+        workspacePermission: 'write',
+      })
+
+      // No duplicate in target folder
+      mockDuplicateCheck([])
+
+      mockDbUpdate.mockReturnValue({
+        set: vi.fn().mockReturnValue({
+          where: vi.fn().mockReturnValue({
+            returning: vi.fn().mockResolvedValue([updatedWorkflow]),
+          }),
+        }),
+      })
+
+      const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123', {
+        method: 'PUT',
+        body: JSON.stringify({ folderId: 'folder-2' }),
+      })
+      const params = Promise.resolve({ id: 'workflow-123' })
+
+      const response = await PUT(req, { params })
+
+      expect(response.status).toBe(200)
+      const data = await response.json()
+      expect(data.workflow.folderId).toBe('folder-2')
+    })
+
+    it('should reject moving to a folder where same name already exists', async () => {
+      const mockWorkflow = {
+        id: 'workflow-123',
+        userId: 'user-123',
+        name: 'My Workflow',
+        folderId: 'folder-1',
+        workspaceId: 'workspace-456',
+      }
+
+      mockGetSession({ user: { id: 'user-123' } })
+      mockGetWorkflowById.mockResolvedValue(mockWorkflow)
+      mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
+        allowed: true,
+        status: 200,
+        workflow: mockWorkflow,
+        workspacePermission: 'write',
+      })
+
+      // Duplicate exists in target folder
+      mockDuplicateCheck([{ id: 'workflow-other' }])
+
+      const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123', {
+        method: 'PUT',
+        body: JSON.stringify({ folderId: 'folder-2' }),
+      })
+      const params = Promise.resolve({ id: 'workflow-123' })
+
+      const response = await PUT(req, { params })
+
+      expect(response.status).toBe(409)
+      const data = await response.json()
+      expect(data.error).toBe('A workflow named "My Workflow" already exists in this folder')
+    })
+
+    it('should skip duplicate check when only updating non-name/non-folder fields', async () => {
+      const mockWorkflow = {
+        id: 'workflow-123',
+        userId: 'user-123',
+        name: 'Test Workflow',
+        workspaceId: 'workspace-456',
+      }
+
+      const updatedWorkflow = { ...mockWorkflow, color: '#FF0000', updatedAt: new Date() }
+
+      mockGetSession({ user: { id: 'user-123' } })
+      mockGetWorkflowById.mockResolvedValue(mockWorkflow)
+      mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
+        allowed: true,
+        status: 200,
+        workflow: mockWorkflow,
+        workspacePermission: 'write',
+      })
+
+      mockDbUpdate.mockReturnValue({
+        set: vi.fn().mockReturnValue({
+          where: vi.fn().mockReturnValue({
+            returning: vi.fn().mockResolvedValue([updatedWorkflow]),
+          }),
+        }),
+      })
+
+      const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123', {
+        method: 'PUT',
+        body: JSON.stringify({ color: '#FF0000' }),
+      })
+      const params = Promise.resolve({ id: 'workflow-123' })
+
+      const response = await PUT(req, { params })
+
+      expect(response.status).toBe(200)
+      // db.select should NOT have been called since no name/folder change
+      expect(mockDbSelect).not.toHaveBeenCalled()
+    })
   })
 
   describe('Error handling', () => {
     it.concurrent('should handle database errors gracefully', async () => {
-      mockGetSession.mockResolvedValue({
-        user: { id: 'user-123' },
-      })
+      mockGetSession({ user: { id: 'user-123' } })
 
       mockGetWorkflowById.mockRejectedValue(new Error('Database connection timeout'))
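The `mockDuplicateCheck` helper in the tests above stubs drizzle's fluent `select().from().where().limit()` chain one link at a time; the same shape works for any builder-style API. A generalized sketch (`stubSelectChain` is a hypothetical name, not a helper in the diff):

```typescript
import { vi, type Mock } from 'vitest'

// Each link returns an object exposing the next link; the terminal call
// resolves with whatever rows the test wants the query to find.
function stubSelectChain(select: Mock, rows: Array<{ id: string }>) {
  select.mockReturnValue({
    from: vi.fn().mockReturnValue({
      where: vi.fn().mockReturnValue({
        limit: vi.fn().mockResolvedValue(rows),
      }),
    }),
  })
}
```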
diff --git a/apps/sim/app/api/workflows/[id]/route.ts b/apps/sim/app/api/workflows/[id]/route.ts
index 170dc83faf..e2621d6e75 100644
--- a/apps/sim/app/api/workflows/[id]/route.ts
+++ b/apps/sim/app/api/workflows/[id]/route.ts
@@ -1,7 +1,7 @@
 import { db } from '@sim/db'
 import { templates, webhook, workflow } from '@sim/db/schema'
 import { createLogger } from '@sim/logger'
-import { eq } from 'drizzle-orm'
+import { and, eq, isNull, ne } from 'drizzle-orm'
 import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
@@ -411,6 +411,45 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
     if (updates.folderId !== undefined) updateData.folderId = updates.folderId
     if (updates.sortOrder !== undefined) updateData.sortOrder = updates.sortOrder
 
+    if (updates.name !== undefined || updates.folderId !== undefined) {
+      const targetName = updates.name ?? workflowData.name
+      const targetFolderId =
+        updates.folderId !== undefined ? updates.folderId : workflowData.folderId
+
+      if (!workflowData.workspaceId) {
+        logger.error(`[${requestId}] Workflow ${workflowId} has no workspaceId`)
+        return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
+      }
+
+      const conditions = [
+        eq(workflow.workspaceId, workflowData.workspaceId),
+        eq(workflow.name, targetName),
+        ne(workflow.id, workflowId),
+      ]
+
+      if (targetFolderId) {
+        conditions.push(eq(workflow.folderId, targetFolderId))
+      } else {
+        conditions.push(isNull(workflow.folderId))
+      }
+
+      const [duplicate] = await db
+        .select({ id: workflow.id })
+        .from(workflow)
+        .where(and(...conditions))
+        .limit(1)
+
+      if (duplicate) {
+        logger.warn(
+          `[${requestId}] Duplicate workflow name "${targetName}" in folder ${targetFolderId ?? 'root'}`
+        )
+        return NextResponse.json(
+          { error: `A workflow named "${targetName}" already exists in this folder` },
+          { status: 409 }
+        )
+      }
+    }
+
     // Update the workflow
     const [updatedWorkflow] = await db
       .update(workflow)
diff --git a/apps/sim/app/api/workflows/route.test.ts b/apps/sim/app/api/workflows/route.test.ts
index ddef020dc6..9920a7b71c 100644
--- a/apps/sim/app/api/workflows/route.test.ts
+++ b/apps/sim/app/api/workflows/route.test.ts
@@ -1,11 +1,16 @@
 /**
  * @vitest-environment node
  */
-import { auditMock, createMockRequest, mockConsoleLogger, setupCommonApiMocks } from '@sim/testing'
+import {
+  auditMock,
+  createMockRequest,
+  mockConsoleLogger,
+  mockHybridAuth,
+  setupCommonApiMocks,
+} from '@sim/testing'
 import { drizzleOrmMock } from '@sim/testing/mocks'
 import { beforeEach, describe, expect, it, vi } from 'vitest'
 
-const mockCheckSessionOrInternalAuth = vi.fn()
 const mockGetUserEntityPermissions = vi.fn()
 const mockDbSelect = vi.fn()
 const mockDbInsert = vi.fn()
@@ -30,6 +35,7 @@ describe('Workflows API Route - POST ordering', () => {
       randomUUID: vi.fn().mockReturnValue('workflow-new-id'),
     })
 
+    const { mockCheckSessionOrInternalAuth } = mockHybridAuth()
     mockCheckSessionOrInternalAuth.mockResolvedValue({
       success: true,
       userId: 'user-123',
@@ -45,10 +51,6 @@ describe('Workflows API Route - POST ordering', () => {
       },
     }))
 
-    vi.doMock('@/lib/auth/hybrid', () => ({
-      checkSessionOrInternalAuth: (...args: unknown[]) => mockCheckSessionOrInternalAuth(...args),
-    }))
-
     vi.doMock('@/lib/workspaces/permissions/utils', () => ({
       getUserEntityPermissions: (...args: unknown[]) => mockGetUserEntityPermissions(...args),
       workspaceExists: vi.fn(),
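The PUT handler's duplicate-name guard in the `route.ts` hunk above builds its `where` clause from four conditions, using `isNull` instead of `eq` when the workflow lives at the root, since SQL `NULL` never compares equal to anything. The condition-building in isolation, as a sketch — `duplicateNameConditions` is a hypothetical extraction, not a helper in the diff:

```typescript
import { workflow } from '@sim/db/schema'
import { and, eq, isNull, ne, type SQL } from 'drizzle-orm'

function duplicateNameConditions(
  workspaceId: string,
  targetName: string,
  workflowId: string,
  targetFolderId: string | null
): SQL | undefined {
  return and(
    eq(workflow.workspaceId, workspaceId), // scope to the workspace
    eq(workflow.name, targetName), // same name as the rename/move target
    ne(workflow.id, workflowId), // ignore the row being updated
    // folderId IS NULL marks root-level workflows; eq() would never match it.
    targetFolderId ? eq(workflow.folderId, targetFolderId) : isNull(workflow.folderId)
  )
}
```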
diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/action-bar.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/action-bar.tsx
new file mode 100644
index 0000000000..c5472658aa
--- /dev/null
+++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/action-bar.tsx
@@ -0,0 +1,31 @@
+'use client'
+
+import { Trash2, X } from 'lucide-react'
+import { Button } from '@/components/emcn'
+
+interface ActionBarProps {
+  selectedCount: number
+  onDelete: () => void
+  onClearSelection: () => void
+}
+
+export function ActionBar({ selectedCount, onDelete, onClearSelection }: ActionBarProps) {
+  return (
+
+
+
+        {selectedCount} {selectedCount === 1 ? 'row' : 'rows'} selected
+
+
+
+
+
+  )
+}
diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/body-states.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/body-states.tsx
new file mode 100644
index 0000000000..d6b3cbd0c0
--- /dev/null
+++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/body-states.tsx
@@ -0,0 +1,72 @@
+import { Plus } from 'lucide-react'
+import { Button, TableCell, TableRow } from '@/components/emcn'
+import { Skeleton } from '@/components/ui/skeleton'
+import type { ColumnDefinition } from '@/lib/table'
+
+interface LoadingRowsProps {
+  columns: ColumnDefinition[]
+}
+
+export function LoadingRows({ columns }: LoadingRowsProps) {
+  return (
+
+      {Array.from({ length: 25 }).map((_, rowIndex) => (
+
+
+
+
+        {columns.map((col, colIndex) => {
+          const baseWidth =
+            col.type === 'json'
+              ? 200
+              : col.type === 'string'
+                ? 160
+                : col.type === 'number'
+                  ? 80
+                  : col.type === 'boolean'
+                    ? 50
+                    : col.type === 'date'
+                      ? 100
+                      : 120
+          const variation = ((rowIndex + colIndex) % 3) * 20
+          const width = baseWidth + variation
+
+          return (
+
+
+          )
+        })}
+
+      ))}
+
+  )
+}
+
+interface EmptyRowsProps {
+  columnCount: number
+  hasFilter: boolean
+  onAddRow: () => void
+}
+
+export function EmptyRows({ columnCount, hasFilter, onAddRow }: EmptyRowsProps) {
+  return (
+
+
+
+
+      {hasFilter ? 'No rows match your filter' : 'No data'}
+
+      {!hasFilter && (
+      )}
+
+
+
+
+  )
+}
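`LoadingRows` above derives each skeleton's width from the column type plus a small deterministic offset, so the placeholder grid looks organic without reshuffling between renders. The width computation in isolation (`skeletonWidth` is a hypothetical extraction of the inline logic):

```typescript
import type { ColumnDefinition } from '@/lib/table'

function skeletonWidth(col: ColumnDefinition, rowIndex: number, colIndex: number): number {
  const baseWidth =
    col.type === 'json'
      ? 200
      : col.type === 'string'
        ? 160
        : col.type === 'number'
          ? 80
          : col.type === 'boolean'
            ? 50
            : col.type === 'date'
              ? 100
              : 120
  // (rowIndex + colIndex) % 3 cycles through 0, 20, and 40 extra pixels,
  // staggering widths diagonally across the grid.
  return baseWidth + ((rowIndex + colIndex) % 3) * 20
}
```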
diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/cell-renderer.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/cell-renderer.tsx
new file mode 100644
index 0000000000..f4e97e555e
--- /dev/null
+++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/cell-renderer.tsx
@@ -0,0 +1,99 @@
+import type { ColumnDefinition } from '@/lib/table'
+import { STRING_TRUNCATE_LENGTH } from '../lib/constants'
+import type { CellViewerData } from '../lib/types'
+
+interface CellRendererProps {
+  value: unknown
+  column: ColumnDefinition
+  onCellClick: (columnName: string, value: unknown, type: CellViewerData['type']) => void
+}
+
+export function CellRenderer({ value, column, onCellClick }: CellRendererProps) {
+  const isNull = value === null || value === undefined
+
+  if (isNull) {
+    return
+  }
+
+  if (column.type === 'json') {
+    const jsonStr = JSON.stringify(value)
+    return (
+    )
+  }
+
+  if (column.type === 'boolean') {
+    const boolValue = Boolean(value)
+    return (
+      {boolValue ? 'true' : 'false'}
+    )
+  }
+
+  if (column.type === 'number') {
+    return (
+      {String(value)}
+    )
+  }
+
+  if (column.type === 'date') {
+    try {
+      const date = new Date(String(value))
+      const formatted = date.toLocaleDateString('en-US', {
+        year: 'numeric',
+        month: 'short',
+        day: 'numeric',
+        hour: '2-digit',
+        minute: '2-digit',
+      })
+      return (
+      )
+    } catch {
+      return {String(value)}
+    }
+  }
+
+  const strValue = String(value)
+  if (strValue.length > STRING_TRUNCATE_LENGTH) {
+    return (
+    )
+  }
+
+  return {strValue}
+}
diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/cell-viewer-modal.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/cell-viewer-modal.tsx
new file mode 100644
index 0000000000..8139d22732
--- /dev/null
+++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/cell-viewer-modal.tsx
@@ -0,0 +1,84 @@
+import { Copy, X } from 'lucide-react'
+import { Badge, Button, Modal, ModalBody, ModalContent } from '@/components/emcn'
+import type { CellViewerData } from '../lib/types'
+
+interface CellViewerModalProps {
+  cellViewer: CellViewerData | null
+  onClose: () => void
+  onCopy: () => void
+  copied: boolean
+}
+
+export function CellViewerModal({ cellViewer, onClose, onCopy, copied }: CellViewerModalProps) {
+  if (!cellViewer) return null
+
+  return (
+     !open && onClose()}>
+
+
+
+ + {cellViewer.columnName} + + + {cellViewer.type === 'json' ? 'JSON' : cellViewer.type === 'date' ? 'Date' : 'Text'} + +
+
+ + +
+
+ + {cellViewer.type === 'json' ? ( +
+              {JSON.stringify(cellViewer.value, null, 2)}
+            
+ ) : cellViewer.type === 'date' ? ( +
+
+
+ Formatted +
+
+ {new Date(String(cellViewer.value)).toLocaleDateString('en-US', { + weekday: 'long', + year: 'numeric', + month: 'long', + day: 'numeric', + hour: '2-digit', + minute: '2-digit', + second: '2-digit', + timeZoneName: 'short', + })} +
+
+
+
+ ISO Format +
+
+ {String(cellViewer.value)} +
+
+
+ ) : ( +
+ {String(cellViewer.value)} +
+ )} +
+
+
+ ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/context-menu.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/context-menu.tsx new file mode 100644 index 0000000000..eb1dcb8989 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/context-menu.tsx @@ -0,0 +1,49 @@ +import { Edit, Trash2 } from 'lucide-react' +import { + Popover, + PopoverAnchor, + PopoverContent, + PopoverDivider, + PopoverItem, +} from '@/components/emcn' +import type { ContextMenuState } from '../lib/types' + +interface ContextMenuProps { + contextMenu: ContextMenuState + onClose: () => void + onEdit: () => void + onDelete: () => void +} + +export function ContextMenu({ contextMenu, onClose, onEdit, onDelete }: ContextMenuProps) { + return ( + !open && onClose()} + variant='secondary' + size='sm' + colorScheme='inverted' + > + + + + + Edit row + + + + + Delete row + + + + ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/header-bar.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/header-bar.tsx new file mode 100644 index 0000000000..eb589cb0b6 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/header-bar.tsx @@ -0,0 +1,63 @@ +import { Info, RefreshCw } from 'lucide-react' +import { Badge, Button, Tooltip } from '@/components/emcn' +import { Skeleton } from '@/components/ui/skeleton' + +interface HeaderBarProps { + tableName: string + totalCount: number + isLoading: boolean + onNavigateBack: () => void + onShowSchema: () => void + onRefresh: () => void +} + +export function HeaderBar({ + tableName, + totalCount, + isLoading, + onNavigateBack, + onShowSchema, + onRefresh, +}: HeaderBarProps) { + return ( +
+
+ + / + {tableName} + {isLoading ? ( + + ) : ( + + {totalCount} {totalCount === 1 ? 'row' : 'rows'} + + )} +
+ +
+ + + + + View Schema + + + + + + + Refresh + +
+
+ ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/index.ts b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/index.ts new file mode 100644 index 0000000000..e4594de055 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/index.ts @@ -0,0 +1,11 @@ +export * from './action-bar' +export * from './body-states' +export * from './cell-renderer' +export * from './cell-viewer-modal' +export * from './context-menu' +export * from './header-bar' +export * from './pagination' +export * from './query-builder' +export * from './row-modal' +export * from './schema-modal' +export * from './table-viewer' diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/pagination.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/pagination.tsx new file mode 100644 index 0000000000..e73256a63f --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/pagination.tsx @@ -0,0 +1,40 @@ +import { Button } from '@/components/emcn' + +interface PaginationProps { + currentPage: number + totalPages: number + totalCount: number + onPreviousPage: () => void + onNextPage: () => void +} + +export function Pagination({ + currentPage, + totalPages, + totalCount, + onPreviousPage, + onNextPage, +}: PaginationProps) { + if (totalPages <= 1) return null + + return ( +
+ + Page {currentPage + 1} of {totalPages} ({totalCount} rows) + +
+ + +
+
+ ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/query-builder/filter-row.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/query-builder/filter-row.tsx new file mode 100644 index 0000000000..a54beec61b --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/query-builder/filter-row.tsx @@ -0,0 +1,89 @@ +'use client' + +import { X } from 'lucide-react' +import { Button, Combobox, Input } from '@/components/emcn' +import type { FilterRule } from '@/lib/table/query-builder/constants' + +interface FilterRowProps { + rule: FilterRule + index: number + columnOptions: Array<{ value: string; label: string }> + comparisonOptions: Array<{ value: string; label: string }> + logicalOptions: Array<{ value: string; label: string }> + onUpdate: (id: string, field: keyof FilterRule, value: string) => void + onRemove: (id: string) => void + onApply: () => void +} + +export function FilterRow({ + rule, + index, + columnOptions, + comparisonOptions, + logicalOptions, + onUpdate, + onRemove, + onApply, +}: FilterRowProps) { + return ( +
+
+
+
+      {index === 0 ? (
+      ) : (
+         onUpdate(rule.id, 'logicalOperator', value as 'and' | 'or')}
+        />
+      )}
+
+
+
+       onUpdate(rule.id, 'column', value)}
+        placeholder='Column'
+      />
+
+
+
+       onUpdate(rule.id, 'operator', value)}
+      />
+
+
+     onUpdate(rule.id, 'value', e.target.value)}
+      placeholder='Value'
+      onKeyDown={(e) => {
+        if (e.key === 'Enter') {
+          onApply()
+        }
+      }}
+    />
+
+  )
+}
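`FilterRow` above reports every edit through `onUpdate(rule.id, field, value)` rather than owning any state. One way the parent's update callback can satisfy that contract — a sketch assuming the rules live in plain React state (the real `useFilterBuilder` hook consumed below may do this differently):

```typescript
import type { FilterRule } from '@/lib/table/query-builder/constants'

// Immutable update keyed by rule id; untouched rules keep their identity,
// so memoized rows don't re-render.
function updateRule(
  rules: FilterRule[],
  id: string,
  field: keyof FilterRule,
  value: string
): FilterRule[] {
  return rules.map((rule) => (rule.id === id ? ({ ...rule, [field]: value } as FilterRule) : rule))
}
```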
diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/query-builder/index.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/query-builder/index.tsx
new file mode 100644
index 0000000000..379a769fcf
--- /dev/null
+++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/query-builder/index.tsx
@@ -0,0 +1,137 @@
+'use client'
+
+import { useCallback, useMemo, useState } from 'react'
+import { ArrowUpAZ, Loader2, Plus } from 'lucide-react'
+import { nanoid } from 'nanoid'
+import { Button } from '@/components/emcn'
+import type { FilterRule, SortRule } from '@/lib/table/query-builder/constants'
+import { filterRulesToFilter, sortRuleToSort } from '@/lib/table/query-builder/converters'
+import { useFilterBuilder } from '@/lib/table/query-builder/use-query-builder'
+import type { ColumnDefinition } from '@/lib/table/types'
+import type { QueryOptions } from '../../lib/types'
+import { FilterRow } from './filter-row'
+import { SortRow } from './sort-row'
+
+type Column = Pick
+
+interface QueryBuilderProps {
+  columns: Column[]
+  onApply: (options: QueryOptions) => void
+  onAddRow: () => void
+  isLoading?: boolean
+}
+
+export function QueryBuilder({ columns, onApply, onAddRow, isLoading = false }: QueryBuilderProps) {
+  const [rules, setRules] = useState<FilterRule[]>([])
+  const [sortRule, setSortRule] = useState<SortRule | null>(null)
+
+  const columnOptions = useMemo(
+    () => columns.map((col) => ({ value: col.name, label: col.name })),
+    [columns]
+  )
+
+  const {
+    comparisonOptions,
+    logicalOptions,
+    sortDirectionOptions,
+    addRule: handleAddRule,
+    removeRule: handleRemoveRule,
+    updateRule: handleUpdateRule,
+  } = useFilterBuilder({
+    columns: columnOptions,
+    rules,
+    setRules,
+  })
+
+  const handleAddSort = useCallback(() => {
+    setSortRule({
+      id: nanoid(),
+      column: columns[0]?.name || '',
+      direction: 'asc',
+    })
+  }, [columns])
+
+  const handleRemoveSort = useCallback(() => {
+    setSortRule(null)
+  }, [])
+
+  const handleApply = useCallback(() => {
+    const filter = filterRulesToFilter(rules)
+    const sort = sortRuleToSort(sortRule)
+    onApply({ filter, sort })
+  }, [rules, sortRule, onApply])
+
+  const handleClear = useCallback(() => {
+    setRules([])
+    setSortRule(null)
+    onApply({
+      filter: null,
+      sort: null,
+    })
+  }, [onApply])
+
+  const hasChanges = rules.length > 0 || sortRule !== null
+
+  return (
+ {rules.map((rule, index) => ( + + ))} + + {sortRule && ( + + )} + +
+ + + + + {!sortRule && ( + + )} + + {hasChanges && ( + <> + + + + + )} +
+
+ ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/query-builder/sort-row.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/query-builder/sort-row.tsx new file mode 100644 index 0000000000..5e0641be74 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/query-builder/sort-row.tsx @@ -0,0 +1,65 @@ +'use client' + +import { ArrowDownAZ, ArrowUpAZ, X } from 'lucide-react' +import { Button, Combobox } from '@/components/emcn' +import type { SortRule } from '@/lib/table/query-builder/constants' + +interface SortRowProps { + sortRule: SortRule + columnOptions: Array<{ value: string; label: string }> + sortDirectionOptions: Array<{ value: string; label: string }> + onChange: (rule: SortRule | null) => void + onRemove: () => void +} + +export function SortRow({ + sortRule, + columnOptions, + sortDirectionOptions, + onChange, + onRemove, +}: SortRowProps) { + return ( +
+ + +
+ +
+ +
+ onChange({ ...sortRule, column: value })} + placeholder='Column' + /> +
+ +
+ onChange({ ...sortRule, direction: value as 'asc' | 'desc' })} + /> +
+ +
+ {sortRule.direction === 'asc' ? ( + + ) : ( + + )} +
+
+ ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/row-modal.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/row-modal.tsx new file mode 100644 index 0000000000..ae133b482a --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/row-modal.tsx @@ -0,0 +1,363 @@ +'use client' + +import { useState } from 'react' +import { createLogger } from '@sim/logger' +import { AlertCircle } from 'lucide-react' +import { useParams } from 'next/navigation' +import { + Button, + Checkbox, + Input, + Label, + Modal, + ModalBody, + ModalContent, + ModalFooter, + ModalHeader, + Textarea, +} from '@/components/emcn' +import type { ColumnDefinition, TableInfo, TableRow } from '@/lib/table' +import { + useCreateTableRow, + useDeleteTableRow, + useDeleteTableRows, + useUpdateTableRow, +} from '@/hooks/queries/tables' + +const logger = createLogger('RowModal') + +export interface RowModalProps { + mode: 'add' | 'edit' | 'delete' + isOpen: boolean + onClose: () => void + table: TableInfo + row?: TableRow + rowIds?: string[] + onSuccess: () => void +} + +function createInitialRowData(columns: ColumnDefinition[]): Record { + const initial: Record = {} + columns.forEach((col) => { + if (col.type === 'boolean') { + initial[col.name] = false + } else { + initial[col.name] = '' + } + }) + return initial +} + +function cleanRowData( + columns: ColumnDefinition[], + rowData: Record +): Record { + const cleanData: Record = {} + + columns.forEach((col) => { + const value = rowData[col.name] + if (col.type === 'number') { + cleanData[col.name] = value === '' ? null : Number(value) + } else if (col.type === 'json') { + if (typeof value === 'string') { + if (value === '') { + cleanData[col.name] = null + } else { + try { + cleanData[col.name] = JSON.parse(value) + } catch { + throw new Error(`Invalid JSON for field: ${col.name}`) + } + } + } else { + cleanData[col.name] = value + } + } else if (col.type === 'boolean') { + cleanData[col.name] = Boolean(value) + } else { + cleanData[col.name] = value || null + } + }) + + return cleanData +} + +function formatValueForInput(value: unknown, type: string): string { + if (value === null || value === undefined) return '' + if (type === 'json') { + return typeof value === 'string' ? 
value : JSON.stringify(value, null, 2) + } + if (type === 'date' && value) { + try { + const date = new Date(String(value)) + return date.toISOString().split('T')[0] + } catch { + return String(value) + } + } + return String(value) +} + +function getInitialRowData( + mode: RowModalProps['mode'], + columns: ColumnDefinition[], + row?: TableRow +): Record { + if (mode === 'add' && columns.length > 0) { + return createInitialRowData(columns) + } + if (mode === 'edit' && row) { + return row.data + } + return {} +} + +export function RowModal({ mode, isOpen, onClose, table, row, rowIds, onSuccess }: RowModalProps) { + const params = useParams() + const workspaceId = params.workspaceId as string + const tableId = table.id + + const schema = table?.schema + const columns = schema?.columns || [] + + const [rowData, setRowData] = useState>(() => + getInitialRowData(mode, columns, row) + ) + const [error, setError] = useState(null) + const createRowMutation = useCreateTableRow({ workspaceId, tableId }) + const updateRowMutation = useUpdateTableRow({ workspaceId, tableId }) + const deleteRowMutation = useDeleteTableRow({ workspaceId, tableId }) + const deleteRowsMutation = useDeleteTableRows({ workspaceId, tableId }) + const isSubmitting = + createRowMutation.isPending || + updateRowMutation.isPending || + deleteRowMutation.isPending || + deleteRowsMutation.isPending + + const handleFormSubmit = async (e: React.FormEvent) => { + e.preventDefault() + setError(null) + + try { + const cleanData = cleanRowData(columns, rowData) + + if (mode === 'add') { + await createRowMutation.mutateAsync(cleanData) + } else if (mode === 'edit' && row) { + await updateRowMutation.mutateAsync({ rowId: row.id, data: cleanData }) + } + + onSuccess() + } catch (err) { + logger.error(`Failed to ${mode} row:`, err) + setError(err instanceof Error ? err.message : `Failed to ${mode} row`) + } + } + + const handleDelete = async () => { + setError(null) + + const idsToDelete = rowIds ?? (row ? [row.id] : []) + + try { + if (idsToDelete.length === 1) { + await deleteRowMutation.mutateAsync(idsToDelete[0]) + } else { + await deleteRowsMutation.mutateAsync(idsToDelete) + } + + onSuccess() + } catch (err) { + logger.error('Failed to delete row(s):', err) + setError(err instanceof Error ? err.message : 'Failed to delete row(s)') + } + } + + const handleClose = () => { + setRowData({}) + setError(null) + onClose() + } + + // Delete mode UI + if (mode === 'delete') { + const deleteCount = rowIds?.length ?? (row ? 1 : 0) + const isSingleRow = deleteCount === 1 + + return ( + + + +
+
+ +
+

+ Delete {isSingleRow ? 'Row' : `${deleteCount} Rows`} +

+
+
+ +
+ +

+ Are you sure you want to delete {isSingleRow ? 'this row' : 'these rows'}? This + action cannot be undone. +

+
+
+ + + + +
+
+ ) + } + + const isAddMode = mode === 'add' + + return ( + + + +
+

{isAddMode ? 'Add New Row' : 'Edit Row'}

+

+ {isAddMode ? 'Fill in the values for' : 'Update values for'} {table?.name ?? 'table'} +

+
+
+ +
+ + + {columns.map((column) => ( + setRowData((prev) => ({ ...prev, [column.name]: value }))} + /> + ))} + +
+ + + + +
+
+ ) +} + +function ErrorMessage({ error }: { error: string | null }) { + if (!error) return null + + return ( +
+ {error} +
+ ) +} + +interface ColumnFieldProps { + column: ColumnDefinition + value: unknown + onChange: (value: unknown) => void +} + +function ColumnField({ column, value, onChange }: ColumnFieldProps) { + return ( +
+ + + {column.type === 'boolean' ? ( +
+ onChange(checked === true)} + /> + +
+ ) : column.type === 'json' ? ( +